Package com.socrata.model.importer

Examples of com.socrata.model.importer.DatasetInfo
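The snippets on this page come from code built on soda-java, where DatasetInfo carries a dataset's metadata (id, name, description, category, tags, publication stage). As a minimal sketch of how such an object is usually obtained and inspected, the basic usage looks roughly like this; the domain, credentials, app token, and dataset id below are placeholders, not values taken from the examples:

    import com.socrata.api.SodaImporter;
    import com.socrata.model.importer.DatasetInfo;

    public class DatasetInfoSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder domain, credentials, app token, and dataset id; substitute real values.
            final SodaImporter importer = SodaImporter.newImporter(
                    "https://example.socrata.com", "user@example.com", "password", "APP_TOKEN");

            // loadDatasetInfo fetches the metadata for an existing dataset by its id.
            final DatasetInfo info = importer.loadDatasetInfo("abcd-1234");

            System.out.println(info.getId());
            System.out.println(info.getName());
            System.out.println(info.getDescription());
            System.out.println(info.getPublicationStage()); // "published" or "unpublished"
        }
    }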


        //Create a name with a GUID on the end so we know we are not conflicting with someone else running this.
        final String uniqueName = "Nominations-" + UUID.randomUUID().toString();

        final SodaImporter    importer = SodaImporter.newImporter("https://sandbox.demo.socrata.com", "testuser@gmail.com", "OpenData", "D8Atrg62F2j017ZTdkMpuZ9vY");
        final DatasetInfo     nominationsDataset = importer.createViewFromCsv(uniqueName, "This is a test dataset using samples with the nominations schema", NOMINATIONS_CSV, "Name");
        importer.publish(nominationsDataset.getId());
        //Now the dataset is ready to go!

        importer.deleteDataset(nominationsDataset.getId());
    }
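One way to confirm that the publish call took effect is to reload the dataset's metadata and inspect its publication stage. A small sketch of that check, reusing the importer and nominationsDataset variables from the snippet above; it is not part of the original code and would have to run before the deleteDataset call:

        // Not from the original snippet: reload the metadata after publishing and inspect the stage.
        DatasetInfo refreshed = importer.loadDatasetInfo(nominationsDataset.getId());
        System.out.println(refreshed.getName() + " is " + refreshed.getPublicationStage()); // expect "published"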


      final SodaWorkflow workflower = SodaWorkflow.newWorkflow(connectionInfo.getUrl(), connectionInfo.getUser(),
          connectionInfo.getPassword(), connectionInfo.getToken());

      boolean noExceptions = false;
      try {
        DatasetInfo datasetInfo = updater.loadDatasetInfo(datasetID);

        if (datasetInfo == null) {
          runErrorMessage = "Dataset with that ID does not exist or you do not have permission to publish to it";
          runStatus = JobStatus.PUBLISH_ERROR;
        }
        else {
          // If the dataset is already published, edits have to go through a working copy.
          if (DatasetInfo.PUBLISHED.equals(datasetInfo.getPublicationStage())) {
            DatasetInfo workingCopyDatasetInfo = workflower.createWorkingCopy(datasetInfo.getId());
            datasetInfo = updater.loadDatasetInfo(workingCopyDatasetInfo.getId());
            workingCopyDatasetId = datasetInfo.getId();
          }

          datasetInfo.setName(title);
          datasetInfo.setDescription(description);
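The snippet above is cut off right after the new name and description are set. The original code is not shown beyond this point, so the following continuation is an assumption rather than the author's code: it writes the edited metadata back through updater (assumed here to be a soda-java SodaDdl) and republishes if a working copy was created.

          // Assumed continuation (not shown in the original snippet):
          // write the edited metadata back, then republish the working copy if one was made.
          updater.updateDatasetInfo(datasetInfo);

          if (workingCopyDatasetId != null) {
            workflower.publish(workingCopyDatasetId);
            workingCopyDatasetId = null;
          }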

    adaptSchemaForAggregates(sourceSet);

    // TODO uncomment (after soda-java is updated to support this)
    //DatasetInfo sinkSet = creator.createDataset(sourceSet, useNewBackend);
    DatasetInfo sinkSet = creator.createDataset(sourceSet);

    String sinkSetID = sinkSet.getId();
    System.out.println(" to dataset " + sinkSetID);
    return sinkSetID;
  }

  public static String publishDataset(SodaDdl publisher, String sinkSetID)
      throws SodaError, InterruptedException {
    DatasetInfo publishedSet = publisher.publish(sinkSetID);
    String publishedID = publishedSet.getId();
    return publishedID;
  }
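publishDataset pairs with the portSchema method from the snippet above; both are driven by soda-java SodaDdl connections. A hedged usage sketch follows, not taken from the original source: the domains, credentials, and dataset id are placeholders, and SodaImporter.newImporter is used to build the connections only because a SodaImporter is a SodaDdl.

    // Sketch only: port a dataset's schema from a source domain to a sink domain, then publish the copy.
    final SodaDdl sourceDdl = SodaImporter.newImporter(
            "https://source.example.com", "user@example.com", "password", "SOURCE_APP_TOKEN");
    final SodaDdl sinkDdl = SodaImporter.newImporter(
            "https://sink.example.com", "user@example.com", "password", "SINK_APP_TOKEN");

    String newDatasetID = PortUtility.portSchema(sourceDdl, sinkDdl, "abcd-1234", "Ported copy", false);
    String publishedID = PortUtility.publishDataset(sinkDdl, newDatasetID);
    System.out.println("Published ported dataset " + publishedID);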

    private void validatePortedSchema(final String newDatasetID) throws SodaError, InterruptedException {
        // Grab the necessary objects for testing.
        DatasetInfo sourceMeta = sourceDdl.loadDatasetInfo(UNITTEST_DATASET_ID);
        DatasetInfo sinkMeta = sinkDdl.loadDatasetInfo(newDatasetID);

        try {
            // First, test the metadata (just the basics) via DatasetInfo.
            TestCase.assertEquals(sourceMeta.getViewType(), sinkMeta.getViewType());
            TestCase.assertEquals(sourceMeta.getName(), sinkMeta.getName());
            TestCase.assertEquals(sourceMeta.getDescription(), sinkMeta.getDescription());
            TestCase.assertEquals(sourceMeta.getCategory(), sinkMeta.getCategory());
            TestCase.assertEquals(sourceMeta.getTags(), sinkMeta.getTags());
            TestCase.assertEquals(sourceMeta.getRights(), sinkMeta.getRights());

            // Next, test the schema by grabbing Dataset and Column objects from DatasetInfo.
            Dataset sourceSchema = (Dataset) sourceMeta;
            Dataset sinkSchema = (Dataset) sinkMeta;
            List<Column> sourceColumns = sourceSchema.getColumns();
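The snippet is truncated right after the source columns are fetched. The original comparison logic is not shown, so the following continuation is only a sketch of how it presumably proceeds, using the soda-java Column getters (getName, getDataTypeName):

            // Assumed continuation (not shown in the original snippet): compare the schemas column by column.
            List<Column> sinkColumns = sinkSchema.getColumns();
            TestCase.assertEquals(sourceColumns.size(), sinkColumns.size());
            for (int i = 0; i < sourceColumns.size(); i++) {
                TestCase.assertEquals(sourceColumns.get(i).getName(), sinkColumns.get(i).getName());
                TestCase.assertEquals(sourceColumns.get(i).getDataTypeName(), sinkColumns.get(i).getDataTypeName());
            }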

        String destinationDatasetName = "New Dataset";
        // Perform the test operation, saving the String return value.
        String newDatasetID = PortUtility.portSchema(sourceDdl, sinkDdl, UNITTEST_DATASET_ID, destinationDatasetName, false);

        // Grab the necessary objects for testing.
        DatasetInfo sourceMeta = sourceDdl.loadDatasetInfo(UNITTEST_DATASET_ID);
        DatasetInfo sinkMeta = sinkDdl.loadDatasetInfo(newDatasetID);

        try {
            // Test the metadata (just the basics) via DatasetInfo.
            TestCase.assertEquals(destinationDatasetName, sinkMeta.getName());
        } finally {
            sinkDdl.deleteDataset(newDatasetID);
        }
    }

    @Test
    public void testPublishDataset() throws SodaError, InterruptedException {
        // Port a dataset's schema and confirm that it is unpublished by default.
        String unpublishedID = PortUtility.portSchema(sourceDdl, sinkDdl, UNITTEST_DATASET_ID, "", false);
        DatasetInfo source = sourceDdl.loadDatasetInfo(unpublishedID);
        TestCase.assertEquals("unpublished", source.getPublicationStage());

        // Perform the test operation.  Confirm the dataset is published afterwards.
        String publishedID = PortUtility.publishDataset(sinkDdl, unpublishedID);
        DatasetInfo sink = sinkDdl.loadDatasetInfo(publishedID);
        TestCase.assertEquals("published", sink.getPublicationStage());

        sinkDdl.deleteDataset(publishedID);
    }
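The assertions above compare publication stages against the literal strings "unpublished" and "published". DatasetInfo also exposes a PUBLISHED constant (it appears in the working-copy snippet earlier on this page), so a tiny helper along these lines is one way to avoid the literal; this is a sketch, not code from the original tests:

    // Sketch only: true when the dataset's publication stage equals DatasetInfo.PUBLISHED ("published").
    static boolean isPublished(DatasetInfo info) {
        return DatasetInfo.PUBLISHED.equals(info.getPublicationStage());
    }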
