diff --git a/vars/dataset.groovy b/vars/dataset.groovy
index 312886d..3329507 100644
--- a/vars/dataset.groovy
+++ b/vars/dataset.groovy
@@ -1,5 +1,5 @@
-def delete(String datasetLabel) {
-    echo "Deleting dataset graphs from label ${datasetLabel}"
+def delete(String datasetPath) {
+    echo "Deleting data/ meta graphs for path ${datasetPath}"
 
     configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
@@ -9,7 +9,6 @@
 
         def draftset = jobDraft.find() // assume it already exists
-        String datasetPath = util.slugise(datasetLabel)
         String datasetGraph = "${baseURI}/graph/${datasetPath}"
         String metadataGraph = "${datasetGraph}/metadata"
 
         drafter.deleteGraph(PMD, credentials, draftset.id, metadataGraph)
diff --git a/vars/uploadTidy.groovy b/vars/uploadTidy.groovy
index 51df380..7ca1c8f 100644
--- a/vars/uploadTidy.groovy
+++ b/vars/uploadTidy.groovy
@@ -1,4 +1,4 @@
-def call(csvs, String mapping=null, String oldLabel=null) {
+def call(csvs, String mapping=null, String datasetPath=util.slugise(env.JOB_NAME)) {
     configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
@@ -22,16 +22,13 @@
             mapping = 'metadata/columns.csv'
         }
 
+        dataset.delete(datasetPath)
+
         def draft = jobDraft.find()
-        dataset.delete(env.JOB_NAME)
-        if (oldLabel) {
-            dataset.delete(oldLabel)
-        }
 
         drafter.addData(PMD, credentials, draft.id, readFile("out/dataset.trig"), "application/trig;charset=UTF-8")
 
-        String datasetPath = util.slugise(env.JOB_NAME)
 
         csvs.each { csv ->
             echo "Uploading ${csv}"
             runPipeline("${PIPELINE}/ons-table2qb.core/data-cube/import",