diff --git a/vars/uploadDraftset.groovy b/vars/uploadDraftset.groovy
index 5caadd7..ac5a53c 100644
--- a/vars/uploadDraftset.groovy
+++ b/vars/uploadDraftset.groovy
@@ -1,9 +1,23 @@
-def call(String datasetLabel, csvs) {
+def call(String datasetLabel, csvs, String mapping=null) {
   configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
     def config = readJSON(text: readFile(file: configfile))
     String PMD = config['pmd_api']
     String credentials = config['credentials']
     String PIPELINE = config['pipeline_api']
+    if (!mapping) {
+      if (fileExists('metadata/columns.csv')) {
+        mapping = 'metadata/columns.csv'
+      } else {
+        mapping = config['default_mapping']
+      }
+    }
+    if (mapping.startsWith('http')) {
+      def response = httpRequest(
+        httpMode: 'GET',
+        url: mapping)
+      writeFile file: 'out/columns.csv', text: response.content
+      mapping = 'out/columns.csv'
+    }
     def drafts = drafter.listDraftsets(PMD, credentials, 'owned')
     def jobDraft = drafts.find { it['display-name'] == env.JOB_NAME }
     if (jobDraft) {
@@ -20,12 +34,15 @@
                      "http://gss-data.org.uk/graph/${datasetPath}")
     drafter.addData(PMD, credentials, newJobDraft.id, readFile("out/dataset.trig"),
                     "application/trig;charset=UTF-8")
+
     csvs.each { csv ->
       echo "Uploading ${csv}"
       runPipeline("${PIPELINE}/ons-table2qb.core/data-cube/import",
                   newJobDraft.id, credentials,
                   [[name: 'observations-csv', file: [name: csv, type: 'text/csv;charset=UTF-8']],
-                   [name: 'dataset-name', value: datasetLabel]])
+                   [name: 'dataset-name', value: datasetLabel],
+                   [name: 'columns-csv',
+                    file: [name: mapping, type: 'text/csv;charset=UTF-8']]]) 
     }
   }
}
\ No newline at end of file