diff --git a/src/uk/org/floop/jenkins_pmd/Drafter.groovy b/src/uk/org/floop/jenkins_pmd/Drafter.groovy
index 735ab48..973250c 100644
--- a/src/uk/org/floop/jenkins_pmd/Drafter.groovy
+++ b/src/uk/org/floop/jenkins_pmd/Drafter.groovy
@@ -2,13 +2,17 @@
 
 import groovy.json.JsonSlurper
 import org.apache.http.HttpHost
+import org.apache.http.HttpResponse
 import org.apache.http.client.fluent.Executor
 import org.apache.http.client.fluent.Request
+import org.apache.http.entity.ContentType
+import org.apache.http.util.EntityUtils
 
 class Drafter implements Serializable {
-    private Executor exec
+    private PMD pmd
     private URI apiBase
     private HttpHost host
+    private String user, pass
 
     enum Include {
         ALL("all"), OWNED("owned"), CLAIMABLE("claimable")
@@ -18,22 +22,157 @@
         }
     }
 
-    Drafter(String api, String user, String pass) {
-        this.apiBase = new URI(api)
-        this.host = new HttpHost(apiBase.getHost())
-        exec = Executor.newInstance()
-            .auth(this.host, user, pass)
-            .authPreemptive(this.host)
+    Drafter(PMD pmd, String user, String pass) {
+        this.pmd = pmd
+        this.apiBase = new URI(pmd.config.pmd_api)
+        this.host = new HttpHost(apiBase.getHost(), (apiBase.getPort() != -1) ? apiBase.getPort() :
+                apiBase.getScheme() == "http" ? 80 : 443)
+        this.user = user
+        this.pass = pass
+    }
+
+    private Executor getExec() {
+        Executor.newInstance()
+                .auth(this.host, this.user, this.pass)
+                .authPreemptive(this.host)
     }
 
     def listDraftsets(Include include=Include.ALL) {
         def js = new JsonSlurper()
-        String path = (include == Include.ALL) ? "/v1/draftsets" : "/v1/draftsets?include=" + Include.value
+        String path = (include == Include.ALL) ? "/v1/draftsets" : "/v1/draftsets?include=" + include.value
         def response = js.parse(
-                exec.execute(
+                getExec().execute(
                         Request.Get(apiBase.resolve(path))
+                                .addHeader("Accept", "application/json")
+                                .userAgent(PMDConfig.UA)
                 ).returnContent().asStream()
         )
         response
     }
-}
\ No newline at end of file
+
+    private static String errorMsg(HttpResponse response) {
+        String body = EntityUtils.toString(response.getEntity())
+        "${response.getStatusLine()} : ${body}"
+    }
+
+    def createDraftset(String label) {
+        String displayName = URLEncoder.encode(label, "UTF-8")
+        String path = "/v1/draftsets?display-name=${displayName}"
+        HttpResponse response = getExec().execute(
+                        Request.Post(apiBase.resolve(path))
+                                .addHeader("Accept", "application/json")
+                                .userAgent(PMDConfig.UA)
+                ).returnResponse()
+        if (response.getStatusLine().statusCode == 200) {
+            return new JsonSlurper().parse(EntityUtils.toByteArray(response.getEntity()))
+        } else {
+            throw new DrafterException("Problem creating draftset ${errorMsg(response)}")
+        }
+    }
+
+    def deleteGraph(String draftsetId, String graph) {
+        String encGraph = URLEncoder.encode(graph, "UTF-8")
+        String path = "/v1/draftset/${draftsetId}/graph?graph=${encGraph}&silent=true"
+        HttpResponse response = getExec().execute(
+                Request.Delete(apiBase.resolve(path))
+                        .addHeader("Accept", "application/json")
+                        .userAgent(PMDConfig.UA)
+        ).returnResponse()
+        if (response.getStatusLine().statusCode == 200) {
+            return new JsonSlurper().parse(EntityUtils.toByteArray(response.getEntity()))
+        } else {
+            throw new DrafterException("Problem deleting graph ${errorMsg(response)}")
+        }
+    }
+
+    def deleteDraftset(String draftsetId) {
+        String path = "/v1/draftset/${draftsetId}"
+        HttpResponse response = getExec().execute(
+                        Request.Delete(apiBase.resolve(path))
+                                .addHeader("Accept", "application/json")
+                                .userAgent(PMDConfig.UA)
+        ).returnResponse()
+        if (response.getStatusLine().statusCode == 202) {
+            def jobObj = new JsonSlurper().parse(EntityUtils.toByteArray(response.getEntity()))
+            waitForJob(apiBase.resolve(jobObj['finished-job'] as String), jobObj['restart-id'] as String)
+        } else {
+            throw new DrafterException("Problem deleting draftset ${errorMsg(response)}")
+        }
+    }
+
+    def waitForJob(URI finishedJob, String restartId) {
+        Executor exec = getExec()
+        while (true) {
+            HttpResponse jobResponse = exec.execute(
+                    Request.Get(finishedJob)
+                            .setHeader("Accept", "application/json")
+                            .userAgent(PMDConfig.UA)
+            ).returnResponse()
+            int status = jobResponse.getStatusLine().statusCode
+            def jobObj
+            try {
+                jobObj = new JsonSlurper().parse(EntityUtils.toByteArray(jobResponse.getEntity()))
+            } catch(e) {
+                throw new DrafterException("Failed waiting for job ${errorMsg(jobResponse)}.\n${e}")
+            }
+            if (status == 404) {
+                if (jobObj['restart-id'] != restartId) {
+                    throw new DrafterException("Failed waiting for job to finish, no/different restart-id ${errorMsg(jobResponse)}")
+                } else {
+                    sleep(10000)
+                }
+            } else if (status == 200) {
+                if (jobObj['restart-id'] != restartId) {
+                    throw new DrafterException("Failed waiting for job to finish, restart-id is different.")
+                } else if (jobObj['type'] != 'ok') {
+                    throw new DrafterException("Pipeline error in ${jobObj.details?.pipeline?.name}. ${jobObj.message}")
+                } else {
+                    break
+                }
+            } else {
+                throw new DrafterException("Unexpected response waiting for job to complete: ${errorMsg(jobResponse)}")
+            }
+        }
+    }
+
+    def addData(String draftId, String fileName, String mimeType, String encoding, String graph=null) {
+        String path = "/v1/draftset/${draftId}/data"
+        if (graph) {
+            String encGraph = URLEncoder.encode(graph, "UTF-8")
+            path = path + "?graph=${encGraph}"
+        }
+        HttpResponse response = getExec().execute(
+                Request.Put(apiBase.resolve(path))
+                        .addHeader("Accept", "application/json")
+                        .userAgent(PMDConfig.UA)
+                        .bodyFile(new File(fileName), ContentType.create(mimeType, encoding))
+        ).returnResponse()
+        if (response.getStatusLine().statusCode == 202) {
+            def jobObj = new JsonSlurper().parse(EntityUtils.toByteArray(response.getEntity()))
+            waitForJob(apiBase.resolve(jobObj['finished-job'] as String), jobObj['restart-id'] as String)
+        } else {
+            throw new DrafterException("Problem adding data ${errorMsg(response)}")
+        }
+    }
+
+    def findDraftset(String displayName) {
+        def drafts = listDraftsets(Include.OWNED)
+        def draftset = drafts.find  { it['display-name'] == displayName }
+        if (draftset) {
+            draftset
+        } else {
+            throw new DrafterException("Can't find draftset with the display-name '${displayName}'")
+        }
+
+    }
+
+}
+
+class DrafterException extends Exception {
+    String message
+
+    DrafterException(String message) {
+        this.message = message
+    }
+
+}
diff --git a/src/uk/org/floop/jenkins_pmd/PMD.groovy b/src/uk/org/floop/jenkins_pmd/PMD.groovy
new file mode 100644
index 0000000..d5d6872
--- /dev/null
+++ b/src/uk/org/floop/jenkins_pmd/PMD.groovy
@@ -0,0 +1,13 @@
+package uk.org.floop.jenkins_pmd
+
+class PMD implements Serializable {
+    PMDConfig config
+    Drafter drafter
+    Pipelines pipelines
+
+    PMD(PMDConfig config, String user, String pass) {
+        this.config = config
+        this.drafter = new Drafter(this, user, pass)
+        this.pipelines = new Pipelines(this, user, pass)
+    }
+}
diff --git a/src/uk/org/floop/jenkins_pmd/PMDConfig.groovy b/src/uk/org/floop/jenkins_pmd/PMDConfig.groovy
new file mode 100644
index 0000000..f49f5c7
--- /dev/null
+++ b/src/uk/org/floop/jenkins_pmd/PMDConfig.groovy
@@ -0,0 +1,11 @@
+package uk.org.floop.jenkins_pmd
+
+class PMDConfig implements Serializable {
+    static final String UA = "uk.org.floop.jenkins_pmd/0.1"
+    String pmd_api
+    String credentials
+    String pipeline_api
+    String default_mapping
+    String base_uri
+
+}
diff --git a/src/uk/org/floop/jenkins_pmd/Pipelines.groovy b/src/uk/org/floop/jenkins_pmd/Pipelines.groovy
new file mode 100644
index 0000000..2ba085e
--- /dev/null
+++ b/src/uk/org/floop/jenkins_pmd/Pipelines.groovy
@@ -0,0 +1,92 @@
+package uk.org.floop.jenkins_pmd
+
+import groovy.json.JsonOutput
+import groovy.json.JsonSlurper
+import org.apache.http.HttpHost
+import org.apache.http.HttpResponse
+import org.apache.http.client.fluent.Executor
+import org.apache.http.client.fluent.Request
+import org.apache.http.entity.ContentType
+import org.apache.http.entity.mime.MultipartEntityBuilder
+import org.apache.http.util.EntityUtils
+
+class Pipelines implements Serializable {
+    private PMD pmd
+    private URI apiBase
+    private HttpHost host
+    private String user, pass, basicAuth
+
+    Pipelines(PMD pmd, String user, String pass) {
+        this.pmd = pmd
+        this.apiBase = new URI(pmd.config.pipeline_api)
+        this.host = new HttpHost(apiBase.getHost(), (apiBase.getPort() != -1) ? apiBase.getPort() :
+                apiBase.getScheme() == "http" ? 80 : 443)
+        this.user = user
+        this.pass = pass
+        this.basicAuth = "${user}:${pass}".bytes.encodeBase64()
+    }
+
+    private Executor getExec() {
+        Executor.newInstance()
+                .auth(this.host, this.user, this.pass)
+                .authPreemptive(this.host)
+    }
+
+    private static String errorMsg(HttpResponse response) {
+        "${response.getStatusLine()} : ${EntityUtils.toString(response.getEntity())}"
+    }
+
+    def dataCube(String draftsetId, String observationsFilename, String datasetName, String datasetPath, String mapping) {
+        String path = "/v1/pipelines/ons-table2qb.core/data-cube/import"
+        MultipartEntityBuilder body = MultipartEntityBuilder.create()
+        body.addTextBody('__endpoint-type', 'grafter-server.destination/draftset-update')
+        body.addTextBody('__endpoint', JsonOutput.toJson([
+                url: "http://localhost:3001/v1/draftset/${draftsetId}/data",
+                headers: [Authorization: "Basic ${basicAuth}"]
+        ]))
+        body.addBinaryBody(
+                'observations-csv',
+                new FileInputStream(observationsFilename),
+                ContentType.create('text/csv', 'UTF-8'),
+                observationsFilename
+        )
+        body.addTextBody('dataset-name', datasetName)
+        body.addTextBody('dataset-slug', datasetPath)
+        InputStream mappingStream
+        if (mapping.startsWith('http')) {
+            mappingStream = Request
+                    .Get(mapping)
+                    .userAgent(PMDConfig.UA)
+                    .addHeader('Accept', 'text/csv')
+                    .execute().returnContent().asStream()
+        } else {
+            mappingStream = new FileInputStream(mapping)
+        }
+        body.addBinaryBody(
+                'columns-csv',
+                mappingStream,
+                ContentType.create('text/csv', 'UTF-8'),
+                mapping
+        )
+        HttpResponse response = getExec().execute(
+                Request.Post(apiBase.resolve(path))
+                        .addHeader("Accept", "application/json")
+                        .userAgent(PMDConfig.UA)
+                .body(body.build())
+        ).returnResponse()
+        if (response.getStatusLine().statusCode == 202) {
+            def jobObj = new JsonSlurper().parse(EntityUtils.toByteArray(response.getEntity()))
+            this.pmd.drafter.waitForJob(apiBase.resolve(jobObj['finished-job'] as String), jobObj['restart-id'] as String)
+        } else {
+            throw new PipelineException("Failed pipeline import: ${errorMsg(response)}")
+        }
+    }
+}
+
+class PipelineException extends Exception {
+    String message
+
+    PipelineException(String message) {
+        this.message = message
+    }
+}
\ No newline at end of file
diff --git a/test/integration/groovy/uk/org/floop/jenkins_pmd/DrafterTests.groovy b/test/integration/groovy/uk/org/floop/jenkins_pmd/DrafterTests.groovy
index a82d280..cd48a59 100644
--- a/test/integration/groovy/uk/org/floop/jenkins_pmd/DrafterTests.groovy
+++ b/test/integration/groovy/uk/org/floop/jenkins_pmd/DrafterTests.groovy
@@ -6,6 +6,7 @@
 import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl
 import com.github.tomakehurst.wiremock.core.WireMockConfiguration
 import com.github.tomakehurst.wiremock.junit.WireMockClassRule
+import com.github.tomakehurst.wiremock.stubbing.Scenario
 import org.jenkinsci.lib.configprovider.ConfigProvider
 import org.jenkinsci.plugins.configfiles.ConfigFileStore
 import org.jenkinsci.plugins.configfiles.GlobalConfigFiles
@@ -28,7 +29,8 @@
 
     @ClassRule
     public static WireMockClassRule wireMockRule = new WireMockClassRule(WireMockConfiguration.options()
-            .dynamicPort()
+            //.dynamicPort()
+            .port(8123)
             .usingFilesUnderClasspath("test/resources")
     )
 
@@ -95,10 +97,15 @@
     @Test
     void "listDraftsets"() {
         stubFor(get(urlMatching("/v1/draftsets.*"))
-                .willReturn(ok().withBodyFile("listDraftsets.json")))
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(ok()
+                .withBodyFile("listDraftsets.json")
+                .withHeader("Content-Type", "application/json")))
         final CpsFlowDefinition flow = new CpsFlowDefinition('''
         node {
-            echo drafter("pmd").listDraftsets()[0].id
+            def pmd = pmdConfig("pmd")
+            echo pmd.drafter.listDraftsets()[0].id
         }'''.stripIndent(), true)
         final WorkflowJob workflowJob = rule.createProject(WorkflowJob, 'project')
         workflowJob.definition = flow
@@ -108,4 +115,81 @@
         rule.assertLogContains('de305d54-75b4-431b-adb2-eb6b9e546014', firstResult)
     }
 
+    @Test
+    void "uploadTidy"() {
+        stubFor(post("/v1/draftsets?display-name=project")
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(seeOther("/v1/draftset/4e376c57-6816-404a-8945-94849299f2a0")))
+        stubFor(get(urlMatching("/v1/draftsets.*"))
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(ok()
+                    .withBodyFile("listDraftsets.json")
+                    .withHeader("Content-Type", "application/json")))
+        stubFor(get("/v1/draftset/4e376c57-6816-404a-8945-94849299f2a0")
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(ok()
+                    .withBodyFile("newDraftset.json")
+                    .withHeader("Content-Type", "application/json")))
+        stubFor(delete("/v1/draftset/4e376c57-6816-404a-8945-94849299f2a0")
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(aResponse()
+                    .withStatus(202)
+                    .withBodyFile("deleteJob.json")
+                    .withHeader("Content-Type", "application/json")))
+        stubFor(delete(urlMatching("/v1/draftset/4e376c57-6816-404a-8945-94849299f2a0/graph.*"))
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(aResponse()
+                    .withStatus(200)
+                    .withBodyFile("deleteGraph.json")
+                    .withHeader("Content-Type", "application/json")))
+        stubFor(put("/v1/draftset/4e376c57-6816-404a-8945-94849299f2a0/data")
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(aResponse()
+                    .withStatus(202)
+                    .withBodyFile("addDataJob.json")
+                    .withHeader("Content-Type", "application/json")))
+        stubFor(get("/v1/status/finished-jobs/2c4111e5-a299-4526-8327-bad5996de400").inScenario("Delete draftset")
+                .whenScenarioStateIs(Scenario.STARTED)
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(aResponse().withStatus(404).withBodyFile('notFinishedJob.json'))
+                .willSetStateTo("Finished"))
+        stubFor(get("/v1/status/finished-jobs/2c4111e5-a299-4526-8327-bad5996de400").inScenario("Delete draftset")
+                .whenScenarioStateIs("Finished")
+                .withHeader("Accept", equalTo("application/json"))
+                .withBasicAuth("admin", "admin")
+                .willReturn(ok()
+                    .withBodyFile("finishedJobOk.json")))
+        stubFor(get("/columns.csv").willReturn(ok().withBodyFile('columns.csv')))
+        stubFor(post("/v1/pipelines/ons-table2qb.core/data-cube/import")
+                .withHeader('Accept', equalTo('application/json'))
+                .withBasicAuth('admin', 'admin')
+                .willReturn(aResponse().withStatus(202).withBodyFile('cubeImportJob.json')))
+        stubFor(get('/status/finished-jobs/4fc9ad42-f964-4f56-a1ab-a00bd622b84c')
+                .withHeader('Accept', equalTo('application/json'))
+                .withBasicAuth('admin', 'admin')
+                .willReturn(ok().withBodyFile('finishedImportJobOk.json')))
+
+        final CpsFlowDefinition flow = new CpsFlowDefinition("""
+        node {
+            dir('out') {
+              writeFile file:'dataset.trig', text:'dummy:data'
+              writeFile file:'observations.csv', text:'Dummy,CSV'
+            }
+            jobDraft.replace()
+            uploadTidy(['out/observations.csv'],
+                       '${wireMockRule.baseUrl()}/columns.csv')
+        }""".stripIndent(), true)
+        final WorkflowJob workflowJob = rule.createProject(WorkflowJob, 'project')
+        workflowJob.definition = flow
+
+        final WorkflowRun firstResult = rule.buildAndAssertSuccess(workflowJob)
+    }
+
 }
diff --git a/test/resources/__files/addDataJob.json b/test/resources/__files/addDataJob.json
new file mode 100644
index 0000000..b1e0a53
--- /dev/null
+++ b/test/resources/__files/addDataJob.json
@@ -0,0 +1,5 @@
+{
+  "type": "ok",
+  "restart-id": "b1b26596-2dca-4e52-883c-7fdcb8b4be97",
+  "finished-job": "/v1/status/finished-jobs/2c4111e5-a299-4526-8327-bad5996de400"
+}
\ No newline at end of file
diff --git a/test/resources/__files/columns.csv b/test/resources/__files/columns.csv
new file mode 100644
index 0000000..74c5416
--- /dev/null
+++ b/test/resources/__files/columns.csv
@@ -0,0 +1,31 @@
+title,name,component_attachment,property_template,value_template,datatype,value_transformation
+Age,age,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#age,http://gss-data.org.uk/def/concept/ages/{age},string,slugize
+Area of Destination or Origin,area_of_destination_or_origin,qb:dimension,http://gss-data.org.uk/def/dimension/area-of-destination-or-origin,http://statistics.data.gov.uk/id/statistical-geography/{area_of_destination_or_origin},string,
+CI,ci,qb:attribute,http://gss-data.org.uk/def/attribute/ci,,number,
+Citizenship,citizenship,qb:dimension,http://gss-data.org.uk/def/dimension/citizenship,http://gss-data.org.uk/def/concept/citizenship/{citizenship},string,slugize
+Count,count,qb:measure,http://gss-data.org.uk/def/measure/count,,number,
+Country of Residence,country_of_residence,qb:dimension,http://gss-data.org.uk/def/dimension/residence,http://gss-data.org.uk/def/concept/country-of-residence/{country_of_residence},string,slugize
+Flow,migration,qb:dimension,http://gss-data.org.uk/def/dimension/migration,http://gss-data.org.uk/def/concept/migration-directions/{migration},string,slugize
+Geography,geography,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#refArea,http://statistics.data.gov.uk/id/statistical-geography/{geography},string,
+Measure Type,measure_type,qb:dimension,http://purl.org/linked-data/cube#measureType,http://gss-data.org.uk/def/measure/{measure_type},string,slugize
+Mid Year,mid_year,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#refPeriod,http://reference.data.gov.uk/id/gregorian-interval/{+mid_year},string,
+Migration,migration_type,qb:dimension,http://gss-data.org.uk/def/dimension/migration-type,http://gss-data.org.uk/def/concept/migration-types/{migration_type},string,slugize
+Note,note,qb:attribute,http://purl.org/linked-data/sdmx/2009/attribute#comment,,string,
+Occupation,occupation,qb:dimension,http://gss-data.org.uk/def/dimension/occupation,http://gss-data.org.uk/def/concept/occupations/{occupation},string,slugize
+Registration Geography,registration_geography,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#refArea,http://gss-data.org.uk/def/concept/registration-geography/{registration_geography},string,slugize
+Period,period,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#refPeriod,http://reference.data.gov.uk/id/{+period},string,
+Reason for migration,reason_for_migration,qb:dimension,http://gss-data.org.uk/def/dimension/reason-for-migration,http://gss-data.org.uk/def/concept/reasons-for-migration/{reason_for_migration},string,slugize
+Revision,revision,qb:dimension,http://gss-data.org.uk/def/dimension/revision,http://gss-data.org.uk/def/concept/revisions/{revision},string,slugize
+Sex,sex,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#sex,http://purl.org/linked-data/sdmx/2009/code#sex-{sex},string,
+Unit,unit,qb:attribute,http://purl.org/linked-data/sdmx/2009/attribute#unitMeasure,http://gss-data.org.uk/def/concept/measurement-units/{unit},string,unitize
+Value,value,,http://gss-data.org.uk/def/measure/{measure_type},,number,
+Year,year,qb:dimension,http://purl.org/linked-data/sdmx/2009/dimension#refPeriod,http://reference.data.gov.uk/id/year/{year},string,
+Migration Flow,migration_flow,qb:dimension,http://gss-data.org.uk/def/dimension/migration,http://gss-data.org.uk/def/concept/migration-directions/{migration_flow},string,slugize
+Population Change Component,population_change_component,qb:dimension,http://gss-data.org.uk/def/dimension/population-change-component,http://gss-data.org.uk/def/concept/population-change-component/{population_change_component},string,slugize
+IPS Citizenship,ips_citizenship,qb:dimension,http://gss-data.org.uk/def/dimension/citizenship,http://gss-data.org.uk/def/concept/ips-citizenship/{ips_citizenship},string,
+LTIM Citizenship,ltim_citizenship,qb:dimension,http://gss-data.org.uk/def/dimension/citizenship,http://gss-data.org.uk/def/concept/ltim-citizenship/{ltim_citizenship},string,slugize
+Applicant type,applicant_type,qb:dimension,http://gss-data.org.uk/def/dimension/applicant-type,http://gss-data.org.uk/def/concept/ho-applicant-types/{applicant_type},string,slugize
+Application category,application_category,qb:dimension,http://gss-data.org.uk/def/dimension/application-category,http://gss-data.org.uk/def/concept/ho-application-category/{application_category},string,slugize
+Application type,application_type,qb:dimension,http://gss-data.org.uk/def/dimension/application-type,http://gss-data.org.uk/def/concept/ho-application-types/{application_type},string,slugize
+Country of nationality,ho_citizenship,qb:dimension,http://gss-data.org.uk/def/dimension/citizenship,http://gss-data.org.uk/def/concept/ho-citizenship/{ho_citizenship},string,slugize
+Resolution,resolution,qb:dimension,http://gss-data.org.uk/def/dimension/resolution,http://gss-data.org.uk/def/concept/ho-application-resolutions/{resolution},string,slugize
diff --git a/test/resources/__files/cubeImportJob.json b/test/resources/__files/cubeImportJob.json
new file mode 100644
index 0000000..60ed849
--- /dev/null
+++ b/test/resources/__files/cubeImportJob.json
@@ -0,0 +1 @@
+{"type":"ok","finished-job":"/status/finished-jobs/4fc9ad42-f964-4f56-a1ab-a00bd622b84c","restart-id":"a2a8f11d-96fd-466e-adaf-85e8d07d4956"}
\ No newline at end of file
diff --git a/test/resources/__files/deleteGraph.json b/test/resources/__files/deleteGraph.json
new file mode 100644
index 0000000..6ae0adb
--- /dev/null
+++ b/test/resources/__files/deleteGraph.json
@@ -0,0 +1,17 @@
+{
+  "id": "4e376c57-6816-404a-8945-94849299f2a0",
+  "display-name": "project",
+  "description": "Jenkins dataset pipeline",
+  "changes": {
+    "http://opendatacommunities.org/graph/homelessness/households-accommodated/temporary-housing-types": {
+      "status": "updated"
+    },
+    "http://opendatacommunities.org/data/labour-force/employment-rate/employment-rate-by-age": {
+      "status": "deleted"
+    }
+  },
+  "updated-at": "2016-01-04T13:35:21.000Z",
+  "created-at": "2016-01-01T13:35:21.000Z",
+  "current-owner": "admin@opendatacommunities.org",
+  "submitted-by": "editor@opendatacommunities.org"
+}
\ No newline at end of file
diff --git a/test/resources/__files/deleteJob.json b/test/resources/__files/deleteJob.json
new file mode 100644
index 0000000..b1e0a53
--- /dev/null
+++ b/test/resources/__files/deleteJob.json
@@ -0,0 +1,5 @@
+{
+  "type": "ok",
+  "restart-id": "b1b26596-2dca-4e52-883c-7fdcb8b4be97",
+  "finished-job": "/v1/status/finished-jobs/2c4111e5-a299-4526-8327-bad5996de400"
+}
\ No newline at end of file
diff --git a/test/resources/__files/finishedImportJobOk.json b/test/resources/__files/finishedImportJobOk.json
new file mode 100644
index 0000000..a701e5d
--- /dev/null
+++ b/test/resources/__files/finishedImportJobOk.json
@@ -0,0 +1,4 @@
+{
+  "type": "ok",
+  "restart-id": "a2a8f11d-96fd-466e-adaf-85e8d07d4956"
+}
\ No newline at end of file
diff --git a/test/resources/__files/finishedJobOk.json b/test/resources/__files/finishedJobOk.json
new file mode 100644
index 0000000..cf3033a
--- /dev/null
+++ b/test/resources/__files/finishedJobOk.json
@@ -0,0 +1,4 @@
+{
+  "type": "ok",
+  "restart-id": "b1b26596-2dca-4e52-883c-7fdcb8b4be97"
+}
\ No newline at end of file
diff --git a/test/resources/__files/listDraftsets.json b/test/resources/__files/listDraftsets.json
index 6741754..2a2f4bf 100644
--- a/test/resources/__files/listDraftsets.json
+++ b/test/resources/__files/listDraftsets.json
@@ -15,5 +15,13 @@
     "created-at": "2016-01-01T13:35:21.000Z",
     "current-owner": "admin@opendatacommunities.org",
     "submitted-by": "editor@opendatacommunities.org"
+  }, {
+    "id": "4e376c57-6816-404a-8945-94849299f2a0",
+    "display-name": "project",
+    "description": "Jenkins dataset pipeline",
+    "updated-at": "2016-01-04T13:35:21.000Z",
+    "created-at": "2016-01-01T13:35:21.000Z",
+    "current-owner": "admin@opendatacommunities.org",
+    "submitted-by": "editor@opendatacommunities.org"
   }
 ]
\ No newline at end of file
diff --git a/test/resources/__files/newDraftset.json b/test/resources/__files/newDraftset.json
new file mode 100644
index 0000000..ffc4476
--- /dev/null
+++ b/test/resources/__files/newDraftset.json
@@ -0,0 +1,9 @@
+{
+"id": "4e376c57-6816-404a-8945-94849299f2a0",
+"display-name": "project",
+"description": "Jenkins dataset pipeline",
+"updated-at": "2016-01-04T13:35:21.000Z",
+"created-at": "2016-01-01T13:35:21.000Z",
+"current-owner": "admin@opendatacommunities.org",
+"submitted-by": "editor@opendatacommunities.org"
+}
\ No newline at end of file
diff --git a/test/resources/__files/notFinishedJob.json b/test/resources/__files/notFinishedJob.json
new file mode 100644
index 0000000..97e7b6e
--- /dev/null
+++ b/test/resources/__files/notFinishedJob.json
@@ -0,0 +1,4 @@
+{
+  "type": "waiting",
+  "restart-id": "b1b26596-2dca-4e52-883c-7fdcb8b4be97"
+}
\ No newline at end of file
diff --git a/vars/dataset.groovy b/vars/dataset.groovy
index 3329507..08224e5 100644
--- a/vars/dataset.groovy
+++ b/vars/dataset.groovy
@@ -1,17 +1,14 @@
+import uk.org.floop.jenkins_pmd.PMD
+
 def delete(String datasetPath) {
     echo "Deleting data/ meta graphs for path ${datasetPath}"
 
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
-        String baseURI = config['base_uri']
+    PMD pmd = pmdConfig("pmd")
 
-        def draftset = jobDraft.find() // assume it already exists
+    def draftset = jobDraft.find() // assume it already exists
 
-        String datasetGraph = "${baseURI}/graph/${datasetPath}"
-        String metadataGraph = "${datasetGraph}/metadata"
-        drafter.deleteGraph(PMD, credentials, draftset.id, metadataGraph)
-        drafter.deleteGraph(PMD, credentials, draftset.id, datasetGraph)
-    }
+    String datasetGraph = "${pmd.config.base_uri}/graph/${datasetPath}"
+    String metadataGraph = "${datasetGraph}/metadata"
+    pmd.drafter.deleteGraph(draftset.id, metadataGraph)
+    pmd.drafter.deleteGraph(draftset.id, datasetGraph)
 }
diff --git a/vars/drafter.groovy b/vars/drafter.groovy
index 40ef169..2857355 100644
--- a/vars/drafter.groovy
+++ b/vars/drafter.groovy
@@ -1,70 +1,42 @@
 import uk.org.floop.jenkins_pmd.Drafter
+import uk.org.floop.jenkins_pmd.PMD
+import uk.org.floop.jenkins_pmd.PMDConfig
 
 def call(String configId) {
     configFileProvider([configFile(fileId: configId, variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
-        withCredentials([usernamePassword(credentialsId: credentials, usernameVariable: 'USER', passwordVariable: 'PASS')]) {
-            return new Drafter(PMD, USER, PASS)
+        PMDConfig config = new PMDConfig(readJSON(text: readFile(file: configfile)))
+        withCredentials([usernamePassword(credentialsId: config.credentials, usernameVariable: 'USER', passwordVariable: 'PASS')]) {
+            return new PMD(config, USER as String, PASS as String).getDrafter()
         }
     }
 }
 
 def listDraftsets(String baseUrl, String credentials, String include) {
     echo "Listing draftsets..."
-/*    def response = httpRequest(acceptType: 'APPLICATION_JSON',
-            authentication: credentials,
-            httpMode: 'GET',
-            url: "${baseUrl}/v1/draftsets?include=${include}")
-    if (response.status == 200) {
-        return readJSON(text: response.content)
-    } else {
-        error "Problem listing draftsets ${response.status} : ${response.content}"
-    } */
-    return Drafter.listDraftsets()
+
+    PMD pmd = pmdConfig("pmd")
+    return pmd.drafter.listDraftsets(Drafter.Include.valueOf(include.toUpperCase()))
 }
 
 def findDraftset(String baseUrl, String credentials, String displayName) {
     echo "Finding draftset with display name '${displayName}'"
 
-    def drafts = listDraftsets(baseUrl, credentials, 'owned')
-    def draftset = drafts.find  { it['display-name'] == displayName }
-    if (draftset) {
-        draftset
-    } else {
-        error "Can't find draftset with the display-name '${displayName}'"
-    }
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.findDraftset(displayName)
 }
 
 def deleteDraftset(String baseUrl, String credentials, String id) {
     echo "Deleting draftset ${id}"
-    def response = httpRequest(acceptType: 'APPLICATION_JSON',
-            authentication: credentials,
-            httpMode: 'DELETE',
-            url: "${baseUrl}/v1/draftset/${id}")
-    if (response.status == 202) {
-        def job = readJSON(text: response.content)
-        waitForJob(
-                "${baseUrl}${job['finished-job']}" as String,
-                credentials, job['restart-id'] as String)
-    } else {
-        error "Problem deleting draftset ${response.status} : ${response.content}"
-    }
+
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.deleteDraftset(id)
 }
 
 def createDraftset(String baseUrl, String credentials, String label) {
     echo "Creating draftset ${label}"
-    String displayName = java.net.URLEncoder.encode(label, "UTF-8")
-    def response = httpRequest(acceptType: 'APPLICATION_JSON',
-            authentication: credentials,
-            httpMode: 'POST',
-            url: "${baseUrl}/v1/draftsets?display-name=${displayName}")
-    if (response.status == 200) {
-        return readJSON(text: response.content)
-    } else {
-        error "Problem creating draftset ${response.status} : ${response.content}"
-    }
+
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.createDraftset(label)
 }
 
 def queryDraftset(String baseUrl, String credentials, String id, String query, String type) {
@@ -84,16 +56,8 @@
 
 def deleteGraph(String baseUrl, String credentials, String id, String graph) {
     echo "Deleting graph <${graph}> from draftset ${id}"
-    String encGraph = java.net.URLEncoder.encode(graph, "UTF-8")
-    def response = httpRequest(acceptType: 'APPLICATION_JSON',
-                               authentication: credentials,
-                               httpMode: 'DELETE',
-                               url: "${baseUrl}/v1/draftset/${id}/graph?graph=${encGraph}&silent=true")
-    if (response.status == 200) {
-        return readJSON(text: response.content)
-    } else {
-        error "Problem deleting graph ${response.status} : ${response.content}"
-    }
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.deleteGraph(id, graph)
 }
 
 def addData(String baseUrl, String credentials, String id, data, String type, String graph=null) {
diff --git a/vars/jobDraft.groovy b/vars/jobDraft.groovy
index 6b690df..1c0fd7e 100644
--- a/vars/jobDraft.groovy
+++ b/vars/jobDraft.groovy
@@ -1,24 +1,18 @@
+import uk.org.floop.jenkins_pmd.PMD
+
 def create() {
     echo "Creating job draft"
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
 
-        drafter.createDraftset(PMD, credentials, env.JOB_NAME)
-    }
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.createDraftset(env.JOB_NAME)
 }
 
 def delete() {
     echo "Deleting job draft"
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
 
-        def draft = drafter.findDraftset(PMD, credentials, env.JOB_NAME)
-        drafter.deleteDraftset(PMD, credentials, draft.id)
-    }
+    PMD pmd = pmdConfig("pmd")
+    def draft = pmd.drafter.findDraftset(env.JOB_NAME)
+    pmd.drafter.deleteDraftset(draft.id)
 }
 
 def replace() {
@@ -34,23 +28,15 @@
 
 def find() {
     echo "Finding job draft"
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
 
-        drafter.findDraftset(PMD, credentials, env.JOB_NAME)
-    }
+    PMD pmd = pmdConfig("pmd")
+    pmd.drafter.findDraftset(env.JOB_NAME)
 }
 
 def publish() {
     echo "Publishing job draft"
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
 
-        def draft = drafter.findDraftset(PMD, credentials, env.JOB_NAME)
-        drafter.publishDraftset(PMD, credentials, draft.id)
-    }
+    PMD pmd = pmdConfig("pmd")
+    def draft = pmd.drafter.findDraftset(env.JOB_NAME)
+    pmd.drafter.publishDraftset(draft.id)
 }
diff --git a/vars/pmdConfig.groovy b/vars/pmdConfig.groovy
new file mode 100644
index 0000000..6a31755
--- /dev/null
+++ b/vars/pmdConfig.groovy
@@ -0,0 +1,12 @@
+import uk.org.floop.jenkins_pmd.Drafter
+import uk.org.floop.jenkins_pmd.PMD
+import uk.org.floop.jenkins_pmd.PMDConfig
+
+def call(String configId) {
+    configFileProvider([configFile(fileId: configId, variable: 'configfile')]) {
+        PMDConfig config = new PMDConfig(readJSON(text: readFile(file: configfile)))
+        withCredentials([usernamePassword(credentialsId: config.credentials, usernameVariable: 'USER', passwordVariable: 'PASS')]) {
+            return new PMD(config, USER as String, PASS as String)
+        }
+    }
+}
diff --git a/vars/publishDraftset.groovy b/vars/publishDraftset.groovy
index 1c92783..a2ee4de 100644
--- a/vars/publishDraftset.groovy
+++ b/vars/publishDraftset.groovy
@@ -1,5 +1,5 @@
 def call() {
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
+    configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
         String credentials = config['credentials']
diff --git a/vars/uploadCodelist.groovy b/vars/uploadCodelist.groovy
index 1ddc6a6..4e1c5db 100644
--- a/vars/uploadCodelist.groovy
+++ b/vars/uploadCodelist.groovy
@@ -1,5 +1,5 @@
 def call(String csv, String name) {
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
+    configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
         String credentials = config['credentials']
diff --git a/vars/uploadComponents.groovy b/vars/uploadComponents.groovy
index 3f431bd..eaa8621 100644
--- a/vars/uploadComponents.groovy
+++ b/vars/uploadComponents.groovy
@@ -1,5 +1,5 @@
 def call(String csv) {
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
+    configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
         String credentials = config['credentials']
diff --git a/vars/uploadCube.groovy b/vars/uploadCube.groovy
index af66ac4..b7a87b3 100644
--- a/vars/uploadCube.groovy
+++ b/vars/uploadCube.groovy
@@ -1,5 +1,5 @@
 def call(String datasetLabel, obslist) {
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
+    configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
         String credentials = config['credentials']
diff --git a/vars/uploadDraftset.groovy b/vars/uploadDraftset.groovy
index 184d8b7..3d044f7 100644
--- a/vars/uploadDraftset.groovy
+++ b/vars/uploadDraftset.groovy
@@ -1,5 +1,5 @@
 def call(String datasetLabel, csvs, String mapping=null, String datasetPath=null) {
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
+    configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) {
         def config = readJSON(text: readFile(file: configfile))
         String PMD = config['pmd_api']
         String credentials = config['credentials']
diff --git a/vars/uploadTidy.groovy b/vars/uploadTidy.groovy
index c5ddbef..442252b 100644
--- a/vars/uploadTidy.groovy
+++ b/vars/uploadTidy.groovy
@@ -1,47 +1,29 @@
+import uk.org.floop.jenkins_pmd.PMD
+
 def call(csvs, String mapping=null, String datasetPath=null) {
     if (!datasetPath) {
         datasetPath = util.slugise(env.JOB_NAME)
     }
-    configFileProvider([configFile(fileId: 'pmd', variable: 'configfile')]) {
-        def config = readJSON(text: readFile(file: configfile))
-        String PMD = config['pmd_api']
-        String credentials = config['credentials']
-        String PIPELINE = config['pipeline_api']
-        String baseURI = config['base_uri']
-        if (!mapping) {
-            if (fileExists('metadata/columns.csv')) {
-                mapping = 'metadata/columns.csv'
-            } else {
-                mapping = config['default_mapping']
-            }
-        }
-        if (mapping.startsWith('http')) {
-            def response = httpRequest(
-                    httpMode: 'GET',
-                    url: mapping)
-            dir ('metadata') {
-                writeFile file: 'columns.csv', text: response.content
-            }
+    PMD pmd = pmdConfig("pmd")
+    if (!mapping) {
+        if (fileExists('metadata/columns.csv')) {
             mapping = 'metadata/columns.csv'
+        } else {
+            mapping = pmd.config.default_mapping
         }
+    }
+    if (!mapping.startsWith('http')) {
+        mapping = "${WORKSPACE}/${mapping}"
+    }
 
-        dataset.delete(datasetPath)
+    dataset.delete(datasetPath)
 
-        def draft = jobDraft.find()
+    def draft = jobDraft.find()
 
-        drafter.addData(PMD, credentials, draft.id,
-                readFile("out/dataset.trig"), "application/trig;charset=UTF-8")
+    pmd.drafter.addData(draft.id as String,"${WORKSPACE}/out/dataset.trig","application/trig","UTF-8")
 
-        csvs.each { csv ->
-            echo "Uploading ${csv}"
-            runPipeline("${PIPELINE}/ons-table2qb.core/data-cube/import",
-                    draft.id, credentials, [
-                    [name: 'observations-csv',
-                     file: [name: csv, type: 'text/csv;charset=UTF-8']],
-                    [name: 'dataset-name', value: ''],
-                    [name: 'dataset-slug', value: datasetPath],
-                    [name: 'columns-csv', file: [name: mapping, type: 'text/csv;charset=UTF-8']]
-            ])
-        }
+    csvs.each { csv ->
+        echo "Uploading ${csv}"
+        pmd.pipelines.dataCube(draft.id as String, "${WORKSPACE}/${csv}", '', datasetPath, mapping)
     }
 }