@@ -2,6 +2,7 @@
 import importlib
 import json
 from posixpath import join
+import re
 import time
 import requests
 import os
@@ -24,20 +25,23 @@

 VALID_IMAGE_EXTENSIONS = ['.tiff', '.tif', '.png', '.jpeg', '.jpg']

+# Regex pattern for valid tag format: "org" or "org/dataset" with lowercase alphanumeric and hyphens
+TAG_PATTERN = re.compile(r'^[a-z0-9][a-z0-9\-]*(/[a-z0-9][a-z0-9\-]*)?$')
+
 def is_valid(file):
     _, file_extension = path.splitext(file)
     return file_extension.lower() in VALID_IMAGE_EXTENSIONS or file == 'gcp_list.txt' or file == 'geo.txt'

 def get_settings(request):
     ds = get_current_plugin().get_user_data_store(request.user)
-
+
     registry_url = ds.get_string('registry_url') or DEFAULT_HUB_URL
     username = ds.get_string('username') or None
     password = ds.get_string('password') or None
     token = ds.get_string('token') or None

     return registry_url, username, password, token
-
+

 def update_token(request, token):
     ds = get_current_plugin().get_user_data_store(request.user)
@@ -70,7 +74,7 @@ def post(self, request):

         ddb = DroneDB(hub_url, username, password)

-        return Response({'success': ddb.login()}, status=status.HTTP_200_OK)
+        return Response({'success': ddb.login()}, status=status.HTTP_200_OK)

     except (Exception) as e:
         return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
@@ -137,36 +141,36 @@ def post(self, request):

         try:

-            result, org, ds, folder, count, size = verify_url(url, username, password).values()
+            result, org, ds, folder, count, size = verify_url(url, username, password).values()

             if (not result):
-                return Response({'error': 'Invalid url.'}, status=status.HTTP_400_BAD_REQUEST)
+                return Response({'error': 'Invalid url.'}, status=status.HTTP_400_BAD_REQUEST)

-            return Response({'count': count, 'success': result, 'ds': ds, 'org': org, 'folder': folder or None, 'size': size}
+            return Response({'count': count, 'success': result, 'ds': ds, 'org': org, 'folder': folder or None, 'size': size}
                             if org else {'success': False}, status=status.HTTP_200_OK)

         except Exception as e:
-            return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
+            return Response({'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)

 class InfoTaskView(TaskView):
     def get(self, request):
-
+
         registry_url, username, _, _ = get_settings(request)
-
+
         return Response({'hubUrl': registry_url, 'username': username}, status=status.HTTP_200_OK)
-
+

 class ImportDatasetTaskView(TaskView):
     def post(self, request, project_pk=None, pk=None):
-
+
         task = self.get_and_check_task(request, pk)

         # Read form data
         ddb_url = request.data.get('ddb_url', None)
-
+
         if ddb_url == None:
             return Response({'error': 'DroneDB url must be set.'}, status=status.HTTP_400_BAD_REQUEST)
-
+
         registry_url, orgSlug, dsSlug, folder = parse_url(ddb_url).values()

         _, username, password, token = get_settings(request)
@@ -175,27 +179,27 @@ def post(self, request, project_pk=None, pk=None):
         # Get the files from the folder
         rawfiles = ddb.get_files_list(orgSlug, dsSlug, folder)
         files = [file for file in rawfiles if is_valid(file['path'])]
-
-        # Verify that the folder url is valid
+
+        # Verify that the folder url is valid
         if len(files) == 0:
             return Response({'error': 'Empty dataset or folder.'}, status=status.HTTP_400_BAD_REQUEST)
-
+
         # Update the task with the new information
         task.console += "Importing {} images...\n".format(len(files))
         task.images_count = len(files)
         task.pending_action = pending_actions.IMPORT
         task.save()
-
+
         # Associate the folder url with the project and task
         combined_id = "{}_{}".format(project_pk, pk)
-
+
         datastore = get_current_plugin().get_global_data_store()
         datastore.set_json(combined_id, {
-            "ddbUrl": ddb_url,
-            "token": ddb.token,
+            "ddbUrl": ddb_url,
+            "token": ddb.token,
             "ddbWebUrl": "{}/r/{}/{}/{}".format(to_web_protocol(registry_url), orgSlug, dsSlug, folder.rstrip('/'))
         })
-
+
         #ddb.refresh_token()

         # Start importing the files in the background
@@ -211,7 +215,7 @@ def import_files(task_id, carrier):
     from app.security import path_traversal_check

     files = carrier['files']
-
+
     headers = {}

     if carrier['token'] != None:
@@ -225,15 +229,15 @@ def download_file(task, file):
         with open(path, 'wb') as fd:
             for chunk in download_stream.iter_content(4096):
                 fd.write(chunk)
-
+
     logger.info("Will import {} files".format(len(files)))
     task = models.Task.objects.get(pk=task_id)
     task.create_task_directories()
     task.save()
-
+
     try:
         downloaded_total = 0
-        for file in files:
+        for file in files:
             download_file(task, file)
             task.check_if_canceled()
             models.Task.objects.filter(pk=task.id).update(upload_progress=(float(downloaded_total) / float(len(files))))
@@ -286,7 +290,7 @@ def ddb_cleanup(sender, task_id, **kwargs):
     logger.info("Info task {0} ({1})".format(str(task_id), status_key))

     datastore.del_key(status_key)
-
+

 class StatusTaskView(TaskView):
     def get(self, request, pk):
@@ -295,7 +299,7 @@ def get(self, request, pk):

         # Associate the folder url with the project and task
         status_key = get_status_key(pk)
-
+
         datastore = get_current_plugin().get_global_data_store()

         task_info = datastore.get_json(status_key, {
@@ -311,7 +315,7 @@ def get(self, request, pk):
         return Response(task_info, status=status.HTTP_200_OK)

 DRONEDB_ASSETS = [
-    'orthophoto.tif',
+    'orthophoto.tif',
     'orthophoto.png',
     'georeferenced_model.laz',
     'dtm.tif',
@@ -321,15 +325,31 @@ def get(self, request, pk):
     'ground_control_points.geojson'
 ]

-class ShareTaskView(TaskView):
+class ShareTaskView(TaskView):
     def post(self, request, pk):

         from app.plugins import logger

         task = self.get_and_check_task(request, pk)

+        # Get optional tag and datasetName from request
+        tag = request.data.get('tag', None)
+        dataset_name = request.data.get('datasetName', None) or task.name
+
+        # Validate tag format if provided
+        if tag is not None and tag.strip():
+            tag = tag.strip().lower()
+            if not TAG_PATTERN.match(tag):
+                return Response({
+                    'error': 'Invalid tag format. Must be "org" or "org/dataset" with lowercase alphanumeric characters and hyphens only.'
+                }, status=status.HTTP_400_BAD_REQUEST)
+
+        # Sanitize dataset_name (remove potentially dangerous characters)
+        if dataset_name:
+            dataset_name = dataset_name.strip()[:255]  # Limit length
+
         status_key = get_status_key(pk)
-
+
         datastore = get_current_plugin().get_global_data_store()

         data = {
@@ -348,33 +368,33 @@ def post(self, request, pk):

         available_assets = [task.get_asset_file_or_stream(f) for f in list(set(task.available_assets) & set(DRONEDB_ASSETS))]

-        if 'textured_model.zip' in task.available_assets:
+        if 'textured_model.zip' in task.available_assets:
             texture_files = [join(task.assets_path('odm_texturing'), f) for f in listdir(task.assets_path('odm_texturing')) if isfile(join(task.assets_path('odm_texturing'), f))]
             available_assets.extend(texture_files)

         assets_path = task.assets_path()

         files = [{'path': f, 'name': f[len(assets_path)+1:], 'size': os.path.getsize(f)} for f in available_assets]

-        share_to_ddb.delay(pk, settings, files)
+        share_to_ddb.delay(pk, settings, files, tag, dataset_name)

-        return Response(data, status=status.HTTP_200_OK)
+        return Response(data, status=status.HTTP_200_OK)


 @task
-def share_to_ddb(pk, settings, files):
-
+def share_to_ddb(pk, settings, files, tag=None, dataset_name=None):
+
     from app.plugins import logger
-
-    status_key = get_status_key(pk)
+
+    status_key = get_status_key(pk)
     datastore = get_current_plugin().get_global_data_store()

     registry_url, username, password, token = settings
-
+
     ddb = DroneDB(registry_url, username, password, token)

-    # Init share (to check)
-    share_token = ddb.share_init()
+    # Init share with optional tag and dataset name
+    share_token = ddb.share_init(tag=tag, dataset_name=dataset_name)

     status = datastore.get_json(status_key)

@@ -394,9 +414,9 @@ def share_to_ddb(pk, settings, files):

         while attempt < 3:
             try:
-
+
                 attempt += 1
-
+
                 up = ddb.share_upload(share_token, file['path'], file['name'])

                 logger.info("Uploaded " + file['name'] + " to Dronedb (hash: " + up['hash'] + ")")
@@ -405,7 +425,7 @@ def share_to_ddb(pk, settings, files):
                 status['uploadedSize'] += file['size']

                 datastore.set_json(status_key, status)
-
+
                 break

             except Exception as e:
@@ -423,7 +443,7 @@ def share_to_ddb(pk, settings, files):


     res = ddb.share_commit(share_token)
-
+
     status['status'] = 3  # Done
     status['shareUrl'] = registry_url + res['url']

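As a sanity check on the tag rule introduced by this diff (a minimal sketch, not part of the commit; check_tag is a hypothetical helper that mirrors the strip/lowercase done in ShareTaskView.post before TAG_PATTERN is matched, and the 'tag'/'datasetName' fields are the optional request-body parameters read by that view):

import re

TAG_PATTERN = re.compile(r'^[a-z0-9][a-z0-9\-]*(/[a-z0-9][a-z0-9\-]*)?$')

def check_tag(tag):
    # Mirror ShareTaskView.post: strip whitespace and lowercase before matching
    return bool(TAG_PATTERN.match(tag.strip().lower()))

assert check_tag('myorg')                 # bare organization slug
assert check_tag('my-org/my-dataset')     # org/dataset form
assert check_tag('MyOrg/Dataset')         # accepted because the view lowercases first
assert not check_tag('-org/dataset')      # must start with a lowercase letter or digit
assert not check_tag('org/data set')      # spaces are rejected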