Commit in java/sandbox/data-cat/src/main/python/hpsdatacat on MAIN
add_location.py    +6  -6    666 -> 667
add_metadata.py    +7  -7    666 -> 667
delete.py          +2  -2    666 -> 667
find.py            +3  -3    666 -> 667
register.py        +10 -10   666 -> 667
Total              +28 -28
5 modified files
Fix names that the refactoring tool automatically mangled.

java/sandbox/data-cat/src/main/python/hpsdatacat
add_location.py 666 -> 667
--- java/sandbox/data-cat/src/main/python/hpsdatacat/add_location.py	2014-06-04 20:15:51 UTC (rev 666)
+++ java/sandbox/data-cat/src/main/python/hpsdatacat/add_location.py	2014-06-04 20:30:18 UTC (rev 667)
@@ -10,8 +10,8 @@
 
 # create the parser
 parser = create_base_argparser(__command)
-parser.add_argument('-p', '--logical_path', help='logical_path in data catalog where dataset is located', required=True)
-parser.add_argument('-n', '--name', help='original dataset name with no dataset_name extension', required=True)
+parser.add_argument('-p', '--path', help='logical folder in data catalog where dataset is located', required=True)
+parser.add_argument('-n', '--name', help='original dataset name (with no file extension)', required=True)
 parser.add_argument('-f', '--file', help='new physical file location', required=True)
 parser.add_argument('-g', '--group', help='dataset group', default=get_default_group())
 parser.add_argument('-s', '--site', help='new dataset site', default=get_default_site())
@@ -24,15 +24,15 @@
     connection = get_ssh_connection_string()    
     if connection == None:
         raise Exception("Couldn't figure out a connection to use!")
-logical_path = args['logical_path']
+logical_folder = args['path']
 dataset_name = args['name']
-dataset_name = args['file']
+file_path = args['file']
 group = args['group']
 site = args['site']
 check_valid_site(site)
 version = None    
 if args['version'] != None:
-    version = args['version']    
+    version = args['version']
 
 # build command line
 command_line = create_base_command_line(__command, connection, dry_run, mode)
@@ -42,7 +42,7 @@
     command_line += ' --site %s' % site
 if version != None:    
     command_line += ' --version %s' % version    
-command_line += ' %s %s %s' % (dataset_name, logical_path, dataset_name)
+command_line += ' %s %s %s' % (dataset_name, logical_folder, file_path)
 
 # run the command
 lines, errors, return_value = run_process(command_line)
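
For context, the add_location.py change above fixes a variable-shadowing slip: args['name'] and args['file'] were both assigned to dataset_name, so the original dataset name was lost before the command line was built. Below is a minimal, self-contained sketch of the corrected pattern; the 'addLocation' command string and the argument values are hypothetical stand-ins for vars(parser.parse_args()):

# Hypothetical parsed arguments, standing in for vars(parser.parse_args()).
args = {'name': 'run_1234', 'path': '/HPS/test', 'file': '/data/run_1234.evio'}

# Each parsed value now keeps its own variable instead of reusing dataset_name.
dataset_name = args['name']
logical_folder = args['path']
file_path = args['file']

# The three positional arguments are appended in this order: name, folder, file.
command_line = 'addLocation'  # hypothetical command name
command_line += ' %s %s %s' % (dataset_name, logical_folder, file_path)
print command_line  # addLocation run_1234 /HPS/test /data/run_1234.evio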

java/sandbox/data-cat/src/main/python/hpsdatacat
add_metadata.py 666 -> 667
--- java/sandbox/data-cat/src/main/python/hpsdatacat/add_metadata.py	2014-06-04 20:15:51 UTC (rev 666)
+++ java/sandbox/data-cat/src/main/python/hpsdatacat/add_metadata.py	2014-06-04 20:30:18 UTC (rev 667)
@@ -21,12 +21,12 @@
 # handle standard arguments
 connection, dry_run, mode = handle_standard_arguments(args)
   
-# dataset_name
-dataset_name = args['name']
+# file_path
+file_path = args['name']
 group = args['group']    
-if dataset_name == None and group == None:
+if file_path == None and group == None:
     raise Exception("A dataset name or a group is required.")
-logical_path = args['path']
+logical_folder = args['path']
 version = args['version']
 
 # metadata    
@@ -38,14 +38,14 @@
 
 # build command line
 command_line = create_base_command_line(__command, connection, dry_run, mode)    
-if dataset_name != None:
-    command_line += ' --dataset %s' % dataset_name    
+if file_path != None:
+    command_line += ' --dataset %s' % file_path    
 if version != None:
     command_line += ' --version %s' % version
 if group != None:
     command_line += ' --group %s' % group
 command_line += ' %s' % metadata
-command_line += ' %s' % logical_path
+command_line += ' %s' % logical_folder
 
 # run the command
 lines, errors, return_value = run_process(command_line)
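
As in the other wrappers, the remote command here is assembled as a plain string, so each optional flag is attached by string concatenation, one += per flag that was actually given. A small hypothetical sketch of that pattern (the 'addMetaData' command name and the values are made up):

# Hypothetical values; None means the flag was not supplied.
file_path = 'run_1234.evio'
version = None
group = 'production'

# Start from the base command and append only the flags that were given.
command_line = 'addMetaData'  # hypothetical command name
if file_path != None:
    command_line += ' --dataset %s' % file_path
if version != None:
    command_line += ' --version %s' % version
if group != None:
    command_line += ' --group %s' % group
print command_line  # addMetaData --dataset run_1234.evio --group production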

java/sandbox/data-cat/src/main/python/hpsdatacat
delete.py 666 -> 667
--- java/sandbox/data-cat/src/main/python/hpsdatacat/delete.py	2014-06-04 20:15:51 UTC (rev 666)
+++ java/sandbox/data-cat/src/main/python/hpsdatacat/delete.py	2014-06-04 20:30:18 UTC (rev 667)
@@ -15,11 +15,11 @@
 
 # process command line arguments
 connection, dry_run, mode = handle_standard_arguments(args)
-logical_path = args['path']
+logical_folder = args['path']
     
 # build command line
 command_line = create_base_command_line(__command, connection, dry_run, mode)
-command_line += ' --force %s' % logical_path
+command_line += ' --force %s' % logical_folder
 
 # run command line
 lines, errors, return_value = run_process(command_line)

java/sandbox/data-cat/src/main/python/hpsdatacat
find.py 666 -> 667
--- java/sandbox/data-cat/src/main/python/hpsdatacat/find.py	2014-06-04 20:15:51 UTC (rev 666)
+++ java/sandbox/data-cat/src/main/python/hpsdatacat/find.py	2014-06-04 20:30:18 UTC (rev 667)
@@ -15,7 +15,7 @@
 
 # command line parser
 parser = create_base_argparser(__command)
-parser.add_argument('-p', '--path', help='root logical_path for search', default=get_default_search_path())
+parser.add_argument('-p', '--path', help='root logical folder for search', default=get_default_search_path())
 parser.add_argument('-s', '--site', help='dataset site', default=get_default_site())
 parser.add_argument('-o', '--output', help='save results to output file')
 parser.add_argument('-q', '--query', help='data query for filtering results')
@@ -23,7 +23,7 @@
 
 # get standard arguments
 connection, dry_run, mode = handle_standard_arguments(args)    
-logical_path = args['path']
+logical_folder = args['path']
 site = args['site']
 check_valid_site(site)
     
@@ -40,7 +40,7 @@
 command_line += ' --site %s' % site
 if query != None:
     command_line += ' %s' % query
-command_line += ' %s' % logical_path     
+command_line += ' %s' % logical_folder     
  
 # setup the output file if specified
 output = None
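
The renamed --path option in find.py can be exercised against a stand-alone parser; the sketch below is hypothetical and self-contained, using made-up defaults in place of get_default_search_path() and get_default_site():

import argparse

parser = argparse.ArgumentParser(prog='find')
parser.add_argument('-p', '--path', help='root logical folder for search', default='/HPS')
parser.add_argument('-s', '--site', help='dataset site', default='SLAC')
parser.add_argument('-o', '--output', help='save results to output file')
parser.add_argument('-q', '--query', help='data query for filtering results')

# Parse a hypothetical invocation instead of sys.argv.
args = vars(parser.parse_args(['-p', '/HPS/test', '-q', 'nEvents > 1000']))
print args['path']   # /HPS/test
print args['query']  # nEvents > 1000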

java/sandbox/data-cat/src/main/python/hpsdatacat
register.py 666 -> 667
--- java/sandbox/data-cat/src/main/python/hpsdatacat/register.py	2014-06-04 20:15:51 UTC (rev 666)
+++ java/sandbox/data-cat/src/main/python/hpsdatacat/register.py	2014-06-04 20:30:18 UTC (rev 667)
@@ -12,18 +12,18 @@
     
 # create command line parser
 parser = create_base_argparser(__command)
-parser.add_argument('-p', '--logical_path', help='destination logical logical_path in the data catalog', required=True)
+parser.add_argument('-p', '--path', help='destination logical folder in the data catalog', required=True)
 parser.add_argument('-f', '--file', help='input physical file to register', required=True)
 parser.add_argument('-d', '--define', help='define a field with format key=value', action='append')
-parser.add_argument('-g', '--group', help='group under the logical_path', default=get_default_group())
-parser.add_argument('-s', '--site', help='site of the new file', default=get_default_site())
+parser.add_argument('-g', '--group', help='dataset group', default=get_default_group())
+parser.add_argument('-s', '--site', help='site of the physical file', default=get_default_site())
 args = vars(parser.parse_args())
 
 # process command line arguments
 connection, dry_run, mode = handle_standard_arguments(args)            
-logical_path = args['logical_path']
-dataset_name = args['file']
-file_extension = os.path.splitext(dataset_name)[1][1:]
+logical_folder = args['path']
+file_path = args['file']
+file_extension = os.path.splitext(file_path)[1][1:]
 group = args['group']
 site = args['site']
 check_valid_site(site)
@@ -39,16 +39,16 @@
 command_line += ' --group %s --site %s' % (group, site)
 if metadata != None:
     command_line += ' %s' % metadata    
-command_line += ' %s %s %s' % (file_extension, logical_path, dataset_name)
+command_line += ' %s %s %s' % (file_extension, logical_folder, file_path)
 
 # run the command
 lines, errors, return_value = run_process(command_line)
 
-# print dataset_name information for new dataset
+# print file_path information for new dataset
 if return_value == 0:
     print 'Added dataset to catalog ...'
-    print '  file: %s' % dataset_name
-    print '  logical_path: %s' % logical_path
+    print '  file: %s' % file_path
+    print '  folder: %s' % logical_folder
     print '  group: %s' % group
     print '  site: %s' % site
     print '  metadata: %s' % str(raw_metadata)
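
One detail worth noting in register.py: os.path.splitext() returns a (root, extension) pair with the dot included, so the [1][1:] indexing yields the bare extension. A quick illustration with a hypothetical path:

import os

# splitext('/data/hps/run_1234.evio') -> ('/data/hps/run_1234', '.evio');
# [1] takes the extension and [1:] drops the leading dot.
file_path = '/data/hps/run_1234.evio'
file_extension = os.path.splitext(file_path)[1][1:]
print file_extension  # evio
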
SVNspam 0.1