Import Data As Batches In New Table

 

Initiates an import job that creates a new table and imports the data present in multiple batch files into the newly created table.

REQUEST URI

https://<ZohoAnalytics_Server_URI>/restapi/v2/bulk/workspaces/<workspace-id>/data/batch

POST

OAuth Scope: ZohoAnalytics.data.create

QUERY PARAMETERS

Parameter NameDescription
CONFIG*JSONObject

Config parameter specifications are available in the below section.

FIELDS FOR CONFIG JSON

KeyDescription
batchKey*String
Key to identify the batch import job. For the initial request the value should be "start".
isLastBatch*Boolean
To mention the completion of batch file upload.
Default value - false.
tableName*String
The name of the table.
autoIdentify*Boolean
To specify whether to auto identify the CSV format.
onErrorString
Controls the action to be taken in case there is an error during import.
  • abort - In case of any error, abort the whole import.
  • skiprow - In case of any error, skip the specific row(s) that have the problem and continue importing the rest.
  • setcolumnempty - In case of any error, set the value of the error column for the row to empty and continue importing.
selectedColumnsJSONArray
Controls the columns that need to be imported.

Sample: ["column1","column2"]
skipTopInteger
Number of rows that are to be skipped from the top in the CSV file being imported.
thousandSeparatorInteger
Controls the action to be taken in case there is a thousand separator in the data.
  • 0 - COMMA
  • 1 - DOT
  • 2 - SPACE
  • 3 - SINGLE QUOTE
decimalSeparatorInteger
Controls the action to be taken in case there is a decimal separator in the data.
  • 0 - DOT
  • 1 - COMMA
dateFormatString
Specify this in case any date field is being imported and its format cannot be auto-recognized by Zoho Analytics.

Sample: dd-MMM-YYYY
Refer this link for more details about how to construct a custom date format.
columnDateFormatJSONObject
Specify this in case multiple date fields are being imported having different format each.
Column name as key and date format as value.

Sample: {"columnName1":"dd-MMM-YYYY","columnName2":"MM/dd/yyyy"}
callbackUrlString
A valid URL, that should allow HTTP Post method.
The Bulk Import Job's details are posted to this URL on successful completion or failure of the job.

CSV SPECIFIC ATTRIBUTES

Note : These attributes are mandatory if autoIdentify is set to false.

KeyDescription
commentCharChar
If the character mentioned is found at the beginning of the row, the CSV row will be skipped.

Sample: #
delimiterInteger
The delimiter character used for separating the fields in a row in the CSV.
  • 0 - COMMA
  • 1 - TAB
  • 2 - SEMICOLON
  • 3 - SPACE
quotedInteger
The Text Qualifier.
  • 0 - NONE
  • 1 - SINGLE QUOTE
  • 2 - DOUBLE QUOTE

POSSIBLE ERROR CODES

7103 , 7111 , 7232 , 7248 , 7249 , 7301 , 8046 , 8119 , 8504 , 8506 , 8513 , 8516

Sample Request:

curl https://analyticsapi.zoho.com/restapi/v2/bulk/workspaces/<workspace-id>/data/batch?CONFIG=<encoded_json_value> 
-F 'FILE=@/home/local/import.csv' -X 'POST' 
-H 'ZANALYTICS-ORGID: <org-id>' 
-H 'Authorization: Zoho-oauthtoken <access_token>'
using System;
using System.Collections.Generic;
using ZohoAnalytics;
using System.Text.Json;

namespace ZohoAnalyticsTest
{
    // Sample: create a new table in a Zoho Analytics workspace and import a
    // CSV file into it in batches through the bulk API.
    class Program
    {
        // Identifiers of the target organization and workspace.
        long orgId = 55522777;
        long workspaceId = 35130000001055707;

        // Initiates a batch import job that creates a new table and uploads
        // the CSV file in chunks of batchSize rows, then prints the job id.
        public void ImportBulkDataInNewTableAsBatches(IAnalyticsClient ac)
        {
            string tableName = "C#_NewTable";
            bool autoIdentify = true;   // let the service detect the CSV format
            string filePath = "C:\\Users\\Administrator\\Downloads\\SalesTable.csv";
            int batchSize = 5000;       // rows uploaded per batch
            IBulkAPI data = ac.GetBulkInstance(orgId, workspaceId);
            // Last argument is the optional CONFIG object; null uses defaults.
            long jobId = data.ImportBulkDataInNewTableAsBatches(tableName, autoIdentify, filePath, batchSize, null);
            Console.WriteLine(jobId);
        }

        static void Main(string[] args)
        {
            // OAuth credentials used to build the Analytics client.
            string clientId = "1000.xxxxxxx";
            string clientSecret = "xxxxxxx";
            string refreshToken = "1000.xxxxxxx.xxxxxxx";

            try
            {
                IAnalyticsClient ac = new AnalyticsClient(clientId, clientSecret, refreshToken);
                Program obj = new Program();
                obj.ImportBulkDataInNewTableAsBatches(ac);
            }
            catch (ServerException ex)
            {
                Console.WriteLine("Server exception - " + ex.GetErrorMessage());
            }
            catch (Exception ex)
            {
                Console.WriteLine("Other exception - " + ex.Message);
            }
        }
    }
}
package main

import (
    "fmt"
    ZAnalytics "zoho/pkg/analyticsclient"
)

var(
    clientId = "1000.xxxxxxx"
    clientSecret = "xxxxxxx"
    refreshToken = "1000.xxxxxxx.xxxxxxx"

    orgId = "55522777"
    workspaceId = "35130000001055707"
)

// ImportBulkDataInNewTableAsBatches creates a new table in the workspace and
// imports the given CSV file into it in batches, printing the job details.
func ImportBulkDataInNewTableAsBatches(ac ZAnalytics.Client) {
    importConfig := map[string]interface{}{}
    table := "Sales"
    detectFormat := "true"
    csvPath := "/home/local/admin/Files/Sales.csv"
    rowsPerBatch := 5000

    bulk := ZAnalytics.GetBulkInstance(&ac, orgId, workspaceId)
    result, err := bulk.ImportBulkDataInNewTableAsBatches(table, detectFormat, csvPath, rowsPerBatch, importConfig)
    if err != nil {
        fmt.Println("Error - " + err.ErrorMessage)
        return
    }
    fmt.Println(result)
}

// main wires up the Analytics client and runs the batch-import sample.
func main() {
    client := ZAnalytics.GetAnalyticsClient(clientId, clientSecret, refreshToken)
    ImportBulkDataInNewTableAsBatches(client)
}
import com.zoho.analytics.client.*;
import org.json.*;

/**
 * Sample: create a new table in a Zoho Analytics workspace and import a CSV
 * file into it in batches through the bulk API.
 */
public class Test {

    // Identifiers of the target organization and workspace.
    // Use the uppercase 'L' suffix for long literals - lowercase 'l' is easily
    // mistaken for the digit '1'.
    private long orgId = 55522777L;
    private long workspaceId = 35130000001055707L;

    public static void main(String args[]){

        // OAuth credentials used to build the Analytics client.
        String clientId = "1000.xxxxxxx";
        String clientSecret = "xxxxxxx";
        String refreshToken = "1000.xxxxxxx.xxxxxxx";

        Test tObj = new Test();
        AnalyticsClient ac = new AnalyticsClient(clientId, clientSecret, refreshToken);

        try {
            tObj.importDataInNewTableAsBatches(ac);
        }
        catch (ServerException ex) {
            System.out.println("Server exception - ErrorCode : " + ex.getErrorCode() + ", ErrorMessage : "  + ex.getErrorMessage());
        }
        catch (ParseException ex) {
            System.out.println("Parser exception - ErrorMessage : "  + ex.getResponseMessage());
        }
        catch (Exception ex) {
            System.out.println("Other exception - ");
            ex.printStackTrace();
        }
    }

    /**
     * Initiates a batch import job that creates a new table and uploads the
     * CSV file in chunks of batchSize rows, then prints the resulting job id.
     *
     * @param ac authenticated Analytics client
     * @throws Exception if the import job cannot be created
     */
    public void importDataInNewTableAsBatches(AnalyticsClient ac) throws Exception {
        String tableName = "Batch Import New Table";
        boolean autoIdentify = true;            // let the service detect the CSV format
        String filePath = "/home/local/admin/Files/Sales.csv";
        int batchSize = 10000;                  // rows uploaded per batch
        JSONObject config = new JSONObject();   // optional CONFIG keys; empty uses defaults
        JSONObject toolConfig = new JSONObject();

        BulkAPI data = ac.getBulkInstance(orgId, workspaceId);
        long jobId = data.importDataInNewTableAsBatches(tableName, autoIdentify, filePath, batchSize, config, toolConfig);
        System.out.println(jobId);
    }
}
<?php

    require 'AnalyticsClient.php';

    // Sample: create a new table in a Zoho Analytics workspace and import a
    // CSV file into it in batches through the bulk API.
    class Test
    {
        public $ac = NULL;
        public $client_id = "1000.xxxxxxx";
        public $client_secret = "xxxxxxx";
        public $refresh_token = "1000.xxxxxxx.xxxxxxx";

        public $org_id = "55522777";
        public $workspace_id = "35130000001055707";

        // Build the authenticated Analytics client once, up front.
        function __construct() {
            $this->ac = new AnalyticsClient($this->client_id, $this->client_secret, $this->refresh_token);
        }

        // Initiates a batch import job that creates a new table and uploads
        // the CSV file in chunks, then prints the job details.
        function importBulkDataInNewTableAsBatches() {
            $new_table = "php_batch_import";
            $detect_format = "true";
            $csv_path = "/home/local/admin/Files/Sales.csv";
            $rows_per_batch = 50000;

            $bulk_api = $this->ac->getBulkInstance($this->org_id, $this->workspace_id);
            $job_details = $bulk_api->importBulkDataInNewTableAsBatches($new_table, $detect_format, $csv_path, $rows_per_batch);
            print_r($job_details);
        }
    }

    // Run the sample, reporting each SDK failure mode separately.
    $runner = new Test();

    try {
        $runner->importBulkDataInNewTableAsBatches();
    }
    catch(ServerException $server_err) {
        echo "Server exception : " . $server_err->getErrorMessage() . "\n";
    }
    catch(IOException $io_err) {
        echo "IO exception : " . $io_err->getErrorMessage() . "\n";
    }
    catch(ParseException $parse_err) {
        echo "Parser exception : " . $parse_err->getErrorMessage() . "\n";
    }
    catch(Exception $err) {
        echo "Exception : " . $err->getErrorMessage() . "\n";
    }
?>
from __future__ import with_statement
from AnalyticsClient import AnalyticsClient
import sys
import json

class Config:
    """OAuth credentials and target identifiers for the Analytics client."""

    # Statement-terminating semicolons removed: they are legal but un-Pythonic.
    CLIENTID = "1000.xxxxxxx"
    CLIENTSECRET = "xxxxxxx"
    REFRESHTOKEN = "1000.xxxxxxx.xxxxxxx"

    ORGID = "55522777"
    WORKSPACEID = "35130000001055707"

class sample:
    """Sample: create a new table and import a CSV file into it in batches."""

    # Shared Analytics client built from the configured credentials.
    ac = AnalyticsClient(Config.CLIENTID, Config.CLIENTSECRET, Config.REFRESHTOKEN)

    def import_data_in_new_table_as_batches(self, ac):
        """Initiate a batch import job that creates a new table and uploads
        the CSV file in chunks of batch_size rows, printing the job details.
        """
        table_name = "Sales"
        auto_identify = "true"  # let the service detect the CSV format
        file_path = "/home/local/admin/Files/Sales.csv"
        bulk = ac.get_bulk_instance(Config.ORGID, Config.WORKSPACEID)
        batch_size = 5000  # rows uploaded per batch
        config = {}  # optional CONFIG keys; empty uses the defaults
        result = bulk.import_data_in_new_table_as_batches(table_name, auto_identify, file_path, batch_size, config)
        print(result)

# Run the sample; any SDK or I/O failure is reported rather than raised.
try:
    obj = sample()
    obj.import_data_in_new_table_as_batches(obj.ac)

except Exception as e:
    print(str(e))
// Sample: create a new table and import a CSV file into it in batches.
// NOTE(review): the original required the SDK as `nodelib` but then called
// `new analyticsClient(...)`, a name that was never defined (ReferenceError).
// The require result is now bound to the name the constructor call uses —
// confirm against the ZAnalyticsClient module's export shape.
var analyticsClient = require('./ZAnalyticsClient');

// OAuth credentials and target identifiers.
var clientId = '1000.xxxxxxx';
var clientSecret = 'xxxxxxx';
var refreshtoken = '1000.xxxxxxx.xxxxxxx';
var orgId = '55522777';
var workspaceId = '35130000001055707';

var ac = new analyticsClient(clientId, clientSecret, refreshtoken);
var tableName = 'Sales';
var batchSize = 5000;        // rows uploaded per batch
var autoIdentify = true;     // let the service detect the CSV format
var filePath = '/home/local/admin/Files/Sales.csv';
var bulk = ac.getBulkInstance(orgId, workspaceId);

bulk.importDataInNewTableAsBatches(tableName, autoIdentify, filePath, batchSize).then((response) => {
    console.log(response);

}).catch((error) => {
    console.log('errorCode : '+error.errorCode);
    console.log('errorMessage : '+error.errorMessage);
    console.log('error : '+error);
});

Download SDK:  C# | GO | JAVA | PHP | PYTHON | NodeJS

Sample value for CONFIG parameter:

{
	"tableName": "SalesTable_1",
	"autoIdentify": "true",
	"onError": "setcolumnempty",
	"batchKey": "start",
	"isLastBatch": "true"
}

Sample Response:

HTTP/1.1 200 OK
Content-Type:application/json;charset=UTF-8

{
	"status": "success",
	"summary": "Create bulk import job",
	"data": {
	"batchKey": "1694703482470_1767024000008426012_SalesTable_1",
	"jobId": "1767024000008787011"
	}
}