FruitBin, commit 364f1608
Authored 1 year ago by Guillaume Duret
Parent commit: 951551b9

Commit message: add interval for occlusion
Changes: 4 changed files, with 24 additions and 18 deletions

    compute_features.py            +4  −3
    main.py                        +6  −4
    slurm/process_dataset.slurm    +10 −8
    utils.py                       +4  −3
compute_features.py  +4 −3

@@ -12,7 +12,7 @@ from fps_alg import process2
 import os
 from PIL import Image

-def process_compute(data_path, full_path, camera, camera_resized, new_size, Nb_camera, World_begin, Nb_world, list_categories, occ_target, vis):
+def process_compute(data_path, full_path, camera, camera_resized, new_size, Nb_camera, World_begin, Nb_world, list_categories, occ_target_min, occ_target_max=1, vis=False):

     transformation = np.matrix([[0.0000000, -1.0000000, 0.0000000],
                                 [0.0000000, 0.0000000, -1.0000000],
                                 [1.0000000, 0.0000000, 0.0000000]])

@@ -68,7 +68,7 @@ def process_compute(data_path, full_path, camera, camera_resized, new_size, Nb_c
         else:
             destination_folders["Mix_all"] = f"Generated_Mix_all_dont_save"

-        categories_array_filtered, categories_array_filtered_occ, categories_array_all, categories_array_all_occ = compute_id_good_occ(data_path, p, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target)
+        categories_array_filtered, categories_array_filtered_occ, categories_array_all, categories_array_all_occ = compute_id_good_occ(data_path, p, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target_min, occ_target_max)

         ### 3D Poses ###
         with open(f'{data_path}/Pose/{p}.json', 'r') as f:

@@ -146,7 +146,8 @@ def process_compute(data_path, full_path, camera, camera_resized, new_size, Nb_c
             meta['Nb_instance'] = Nb_instance_all
             meta['all_id_instance'] = categories_array_all[categories]
             meta['all_id_instance_occlusion'] = categories_array_all_occ[categories]
-            meta['target_occlusion'] = occ_target
+            meta['target_occlusion_min'] = occ_target_min
+            meta['target_occlusion_max'] = occ_target_max
             meta['Nb_instance_filtered'] = Nb_instance_occ
             meta['id_instance_good'] = categories_array_filtered[categories]
             meta['id_instance_good_occlusion'] = categories_array_filtered_occ[categories]
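The updated signature also gives the new parameters defaults: occ_target_max falls back to 1 and vis to False, so a caller that only supplies a lower bound keeps the full upper range of the interval. The standalone Python sketch below illustrates that default behaviour with a simplified stand-in function (process_compute_stub is hypothetical and drops most of the real parameters; the category names come from main.py, the bounds are made up):

# Standalone sketch, not the project code: the new optional parameters of
# process_compute default to occ_target_max=1 and vis=False, so omitting the
# upper bound keeps every occlusion value above the lower one.
def process_compute_stub(list_categories, occ_target_min, occ_target_max=1, vis=False):
    return f"filter {list_categories} to occlusion in ({occ_target_min}, {occ_target_max}], vis={vis}"

print(process_compute_stub(["banana1", "kiwi1"], 0.5))       # upper bound defaults to 1
print(process_compute_stub(["banana1", "kiwi1"], 0.3, 0.7))  # explicit interval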
main.py  +6 −4

@@ -66,7 +66,8 @@ if __name__ == '__main__':
     parser.add_argument('--Nb_worlds', type=int, required=True)
     parser.add_argument('--World_begin', type=int, required=True)
     parser.add_argument('--dataset_id', type=str, default='', required=True)
-    parser.add_argument('--occlusion_target', type=float, default='', required=True)
+    parser.add_argument('--occlusion_target_min', type=float, default='', required=True)
+    parser.add_argument('--occlusion_target_max', type=float, default='', required=True)
     #parser.add_argument('--rearrange', dest='rearrange', default=False, action='store_true')
     #parser.add_argument('--compute', dest='compute', default=False, action='store_true')
     parser.add_argument('--rearrange', type=str, default='no', required=True)

@@ -79,7 +80,8 @@ if __name__ == '__main__':
     ### parameters ###
     Categories = []  # to read
     Nb_instance = 1
-    occ_target = args.occlusion_target
+    occ_target_min = args.occlusion_target_min
+    occ_target_max = args.occlusion_target_max
     dataset_src = f"/gpfsscratch/rech/uli/ubn15wo/DATA/data{args.dataset_id}"
     #dataset_src = "/media/gduret/DATA/dataset/s2rg/Fruits_all_medium/data"

@@ -90,7 +92,7 @@ if __name__ == '__main__':
     dataset_type = data_options[choice]
     dataset_path = f"/gpfsscratch/rech/uli/ubn15wo/FruitBin{args.dataset_id}" #GUIMOD_New_{choice}_{args.dataset_id}"
     #dataset_path = f"/home/gduret/Documents/FruitBin{args.dataset_id}/"
-    dataset_name = f"FruitBin_{choice}_{Nb_instance}_{occ_target}"
+    dataset_name = f"FruitBin_{choice}_{Nb_instance}_{occ_target_min}_{occ_target_max}"
     #dataset_name = f"/gpfsscratch/rech/uli/ubn15wo/dataset_new{args.dataset_id}/s2rg/Fruits_all_medium/GUIMOD_{choice}"
     list_categories = ["banana1", "kiwi1", "pear2", "apricot", "orange2", "peach1", "lemon2", "apple2"]
     Nb_camera = 15

@@ -155,5 +157,5 @@ if __name__ == '__main__':
         np.savetxt(f'{dataset_path}/{dataset_name}/Generated/{categories}/{categories}_bbox_3d.txt', bbox)  # save

     if args.compute == 'yes':
-        process_compute(dataset_path, dataset_path + '/' + dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target, False)
+        process_compute(dataset_path, dataset_path + '/' + dataset_name, camera, new_camera, new_size, Nb_camera, args.World_begin, args.Nb_worlds, list_categories, occ_target_min, occ_target_max, False)
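With the single --occlusion_target flag replaced by a minimum/maximum pair, a run now supplies both bounds on the command line. A hypothetical invocation (all values are illustrative, and --compute is assumed to be defined elsewhere in main.py, since only its commented-out variant appears in this hunk):

python main.py --World_begin=1 --Nb_worlds=10 --dataset_id=1 \
    --occlusion_target_min=0.5 --occlusion_target_max=1.0 \
    --rearrange=no --compute=yes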
slurm/process_dataset.slurm  +10 −8

 #!/bin/bash
-#SBATCH --job-name=Software              # job name
+#SBATCH --job-name=Process               # job name
 #SBATCH -A uli@cpu
 #SBATCH --ntasks=1                       # total number of MPI processes
 #SBATCH --ntasks-per-node=1              # number of MPI processes per node
 #SBATCH --cpus-per-task=2
 #SBATCH --partition=cpu_p1
-#SBATCH --qos=qos_cpu-t4
+#SBATCH --qos=qos_cpu-t3
 # /!\ Caution: the following line is misleading, but in Slurm's vocabulary
 # "multithread" does refer to hyperthreading.
 #SBATCH --hint=nomultithread             # 1 MPI process per physical core (no hyperthreading)
-#SBATCH --time=35:00:00                  # maximum requested run time (HH:MM:SS)
-#SBATCH --output=Output/postprocess%j.out    # output file name
-#SBATCH --error=Output/postprocess%j.out     # error file name (here shared with the output)
+#SBATCH --time=20:00:00                  # maximum requested run time (HH:MM:SS)
+#SBATCH --output=Output/qpostprocess%j.out   # output file name
+#SBATCH --error=Output/qpostprocess%j.out    # error file name (here shared with the output)

 # move to the submission directory
 cd ${SLURM_SUBMIT_DIR}

@@ -31,10 +31,12 @@ set -x
 id_begin=$1
 Nb=$2
 id_dataset=$3
-rearrange=$4
-compute=$5
+occlusion_min=$4
+occlusion_max=$5
+rearrange=$6
+compute=$7

 conda activate stvNet

-time (python main.py --World_begin="$id_begin" --Nb_world="$Nb" --dataset_id="$id_dataset" --rearrange=$rearrange --compute=$compute)
+time (python main.py --World_begin="$id_begin" --Nb_world="$Nb" --dataset_id="$id_dataset" --occlusion_target_min=$occlusion_min --occlusion_target_max=$occlusion_max --rearrange=$rearrange --compute=$compute)
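Because the script now reads the occlusion bounds from $4 and $5, a submission takes seven positional arguments instead of five. A hypothetical submission (values are illustrative only):

# $1=id_begin  $2=Nb  $3=id_dataset  $4=occlusion_min  $5=occlusion_max  $6=rearrange  $7=compute
sbatch slurm/process_dataset.slurm 1 200 1 0.5 1.0 yes yes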
utils.py  +4 −3

@@ -9,6 +9,7 @@ def compute_categories_id(data_name, world):
     # returns JSON object as
     # a dictionary
+    print(f'{data_name}/Meta/{world}.json')
     data = json.load(f)

     # Iterating through the json

@@ -35,7 +36,7 @@ def compute_categories_id(data_name, world):
-def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, Occ_wanted):
+def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, categories_instance_array_cat_to_id, occ_target_min, occ_target_max):

     f2 = open(f'{data_name}/Occlusion/{count}.json')

@@ -53,7 +54,7 @@ def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, c
         categories_array_all_occ[cat] = []

     for i in data2:
-        if i['occlusion_value'] >= Occ_wanted:
+        if i['occlusion_value'] > occ_target_min and i['occlusion_value'] <= occ_target_max:
             categories_array_filtered[categories_instance_array_id_to_cat[i['id']]].append(i['id'])
             categories_array_filtered_occ[categories_instance_array_id_to_cat[i['id']]].append(i['occlusion_value'])
         if i['occlusion_value'] >= 0.05:

@@ -69,4 +70,4 @@ def compute_id_good_occ(data_name, count, categories_instance_array_id_to_cat, c
     #print(categories_array_all_occ)

-    return categories_array_filtered, categories_array_filtered_occ, categories_array_all, categories_array_all_occ
\ No newline at end of file
+    return categories_array_filtered, categories_array_filtered_occ, categories_array_all, categories_array_all_occ
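The behavioural core of the commit is this filter: instead of keeping every instance whose occlusion value is at least a single threshold, compute_id_good_occ now keeps instances whose value lies in the half-open interval (occ_target_min, occ_target_max]. A minimal, self-contained sketch of that logic (the instance records below are made up, not taken from the dataset):

# Standalone sketch of the interval filter introduced in compute_id_good_occ.
instances = [
    {'id': 0, 'occlusion_value': 0.30},
    {'id': 1, 'occlusion_value': 0.55},
    {'id': 2, 'occlusion_value': 0.95},
]

occ_target_min, occ_target_max = 0.5, 1.0  # hypothetical bounds

kept = [i['id'] for i in instances
        if i['occlusion_value'] > occ_target_min and i['occlusion_value'] <= occ_target_max]
print(kept)  # [1, 2]: 0.30 is rejected, while 0.55 and 0.95 fall inside (0.5, 1.0]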