diff --git a/vars/dataset.groovy b/vars/dataset.groovy
index 8765ffb..312886d 100644
--- a/vars/dataset.groovy
+++ b/vars/dataset.groovy
@@ -7,7 +7,7 @@
         String credentials = config['credentials']
         String baseURI = config['base_uri']
 
-        def draftset = drafter.findOrCreateDraftset(PMD, credentials, env.JOB_NAME as String) // assume it already exists
+        def draftset = jobDraft.find() // assumes the job draft already exists (created earlier via jobDraft.create)
 
         String datasetPath = util.slugise(datasetLabel)
         String datasetGraph = "${baseURI}/graph/${datasetPath}"
diff --git a/vars/drafter.groovy b/vars/drafter.groovy
index 469136a..be0cf40 100644
--- a/vars/drafter.groovy
+++ b/vars/drafter.groovy
@@ -23,18 +23,6 @@
     }
 }
 
-def findOrCreateDraftset(String baseUrl, String credentials, String displayName) {
-    echo "Finding or creating draftset with display name '${displayName}'"
-
-    def drafts = listDraftsets(baseUrl, credentials, 'owned')
-    def draftset = drafts.find  { it['display-name'] == displayName }
-    if (draftset) {
-        draftset
-    } else {
-        createDraftset(baseUrl, credentials, displayName)
-    }
-}
-
 def deleteDraftset(String baseUrl, String credentials, String id) {
     echo "Deleting draftset ${id}"
     def response = httpRequest(acceptType: 'APPLICATION_JSON',
diff --git a/vars/jobDraft.groovy b/vars/jobDraft.groovy
new file mode 100644
index 0000000..cc6edc7
--- /dev/null
+++ b/vars/jobDraft.groovy
@@ -0,0 +1,48 @@
+def create() { // create a PMD draftset named after this Jenkins job
+    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) { // shared 'pmd' managed config file
+        def config = readJSON(text: readFile(file: configfile))
+        String PMD = config['pmd_api'] // drafter API base URL
+        String credentials = config['credentials'] // Jenkins credentials id used for API auth
+
+        drafter.createDraftset(PMD, credentials, env.JOB_NAME)
+    }
+}
+
+def delete() { // delete this job's draftset
+    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) { // shared 'pmd' managed config file
+        def config = readJSON(text: readFile(file: configfile))
+        String PMD = config['pmd_api'] // drafter API base URL
+        String credentials = config['credentials'] // Jenkins credentials id used for API auth
+
+        def draft = drafter.findDraftset(PMD, credentials, env.JOB_NAME) // NOTE(review): presumably throws when no draft exists (callers used try/catch) — confirm
+        drafter.deleteDraftset(PMD, credentials, draft.id)
+    }
+}
+
+def replace() { // recreate the job draftset from scratch: best-effort delete, then create
+    try {
+        delete()
+    } catch (e) { echo 'No old job draft to delete' } // original 'try' had no catch/finally — a Groovy compile error; a missing draft is expected on first run
+    create()
+}
+
+def find() { // look up this job's existing draftset by job name
+    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) { // shared 'pmd' managed config file
+        def config = readJSON(text: readFile(file: configfile))
+        String PMD = config['pmd_api'] // drafter API base URL
+        String credentials = config['credentials'] // Jenkins credentials id used for API auth
+
+        drafter.findDraftset(PMD, credentials, env.JOB_NAME)
+    }
+}
+
+def publish() { // publish this job's draftset, making its changes live
+    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) { // shared 'pmd' managed config file
+        def config = readJSON(text: readFile(file: configfile))
+        String PMD = config['pmd_api'] // drafter API base URL
+        String credentials = config['credentials'] // Jenkins credentials id used for API auth
+
+        def draft = drafter.findDraftset(PMD, credentials, env.JOB_NAME)
+        drafter.publishDraftset(PMD, credentials, draft.id)
+    }
+}
diff --git a/vars/uploadTidy.groovy b/vars/uploadTidy.groovy
index 69ed530..51df380 100644
--- a/vars/uploadTidy.groovy
+++ b/vars/uploadTidy.groovy
@@ -22,27 +22,20 @@
             mapping = 'metadata/columns.csv'
         }
 
-        try {
-            def oldJobDraft = drafter.findDraftset(PMD, credentials, env.JOB_NAME)
-            drafter.deleteDraftset(PMD, credentials, oldJobDraft.id)
-        } catch(e) {
-            echo 'No old job draft to delete'
-        }
-
-        def newJobDraft = drafter.findOrCreateDraftset(PMD, credentials, env.JOB_NAME)
+        def draft = jobDraft.find() // assumes the job draft already exists; delete/recreate now handled by jobDraft.replace
 
         dataset.delete(env.JOB_NAME)
         if (oldLabel) {
            dataset.delete(oldLabel)
         }
-        drafter.addData(PMD, credentials, newJobDraft.id,
+        drafter.addData(PMD, credentials, draft.id,
                 readFile("out/dataset.trig"), "application/trig;charset=UTF-8")
 
         String datasetPath = util.slugise(env.JOB_NAME)
         csvs.each { csv ->
             echo "Uploading ${csv}"
             runPipeline("${PIPELINE}/ons-table2qb.core/data-cube/import",
-                    newJobDraft.id, credentials, [
+                    draft.id, credentials, [
                     [name: 'observations-csv',
                      file: [name: csv, type: 'text/csv;charset=UTF-8']],
                     [name: 'dataset-name', value: ''],