Import Data As Batches In Existing Table

Initiates an import job that imports the data present in multiple batch files into the specified table.

REQUEST URI

https://<ZohoAnalytics_Server_URI>/restapi/v2/bulk/workspaces/<workspace-id>/views/<view-id>/data/batch

POST

OAuth Scope: ZohoAnalytics.data.create

QUERY PARAMETERS

Parameter Name | Description

CONFIG* (JSONObject)
Config parameter specifications are available in the section below.
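
Since CONFIG travels in the query string, the JSON has to be URL-encoded. A minimal sketch in Python (the endpoint placeholders and field values are illustrative):

import json
import urllib.parse

# Illustrative CONFIG for the initial batch request.
config = {
    "importType": "append",
    "autoIdentify": "true",
    "batchKey": "start",
    "isLastBatch": "false"
}

# URL-encode the JSON before appending it as the CONFIG query parameter.
encoded_config = urllib.parse.quote(json.dumps(config))
url = ("https://analyticsapi.zoho.com/restapi/v2/bulk/workspaces/"
       "<workspace-id>/views/<view-id>/data/batch?CONFIG=" + encoded_config)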

FIELDS FOR CONFIG JSON

Key | Description

batchKey* (String)
Key to identify the batch import job. For the initial request, the value should be "start"; subsequent requests should use the batchKey returned in the previous response.

isLastBatch* (Boolean)
Marks the completion of the batch file upload.
Default value - false.

importType* (String)
Controls the type of import.
  • append - Appends the data to the table.
  • truncateadd - Deletes all existing rows in the table and adds the imported data as new rows.
  • updateadd - Updates the row if the mentioned column values match; otherwise a new row is added.

autoIdentify* (Boolean)
Specifies whether the CSV format should be auto-identified.

onError (String)
Controls the action to be taken in case there is an error during import.
  • abort - In case of any error, abort the whole import.
  • skiprow - In case of any error, skip the specific row(s) that have the problem and continue importing the rest.
  • setcolumnempty - In case of any error, set the value of the error column for the row to empty and continue importing.

matchingColumns (JSONArray)
(Mandatory only when the importType is updateadd.)
The values in the columns to be matched are used for comparison to check whether the data row(s) being imported match existing row(s) in the table.
The existing rows in the table that match are updated with values from the imported data; the remaining rows are appended to the table as new rows. See the sample CONFIG after this table.

Sample: ["column1","column2"]

selectedColumns (JSONArray)
Controls the columns that need to be imported.

Sample: ["column1","column2"]

skipTop (Integer)
Number of rows to skip from the top of the CSV file being imported.

thousandSeparator (Integer)
Specifies the thousands separator used in the data.
  • 0 - COMMA
  • 1 - DOT
  • 2 - SPACE
  • 3 - SINGLE QUOTE

decimalSeparator (Integer)
Specifies the decimal separator used in the data.
  • 0 - DOT
  • 1 - COMMA

dateFormat (String)
Specify this in case a date field is being imported and its format cannot be auto-recognized by Zoho Analytics.

Sample: dd-MMM-YYYY
Refer to the Zoho Analytics documentation for more details about how to construct a custom date format.

columnDateFormat (JSONObject)
Specify this in case multiple date fields with different formats are being imported.
Column name as key and date format as value.

Sample: {"columnName1":"dd-MMM-YYYY","columnName2":"MM/dd/yyyy"}

callbackUrl (String)
A valid URL that should allow the HTTP POST method.
The bulk import job's details are posted to this URL on successful completion or on failure of the job.
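
To make the update flow concrete, here is an illustrative CONFIG for an updateadd import that matches on two columns and supplies a per-column date format; the column names are hypothetical placeholders:

{
	"batchKey": "start",
	"isLastBatch": "false",
	"importType": "updateadd",
	"autoIdentify": "true",
	"matchingColumns": ["OrderID", "Region"],
	"columnDateFormat": {"OrderDate": "dd-MMM-YYYY"},
	"onError": "skiprow"
}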

CSV SPECIFIC ATTRIBUTES

Note: These attributes are mandatory if autoIdentify is set to false; a sample CONFIG that uses them follows the table.

Key | Description

commentChar (Char)
If the character mentioned is found at the beginning of a row, that CSV row will be skipped.

Sample: #

delimiter (Integer)
The delimiter character used for separating the fields in a row of the CSV.
  • 0 - COMMA
  • 1 - TAB
  • 2 - SEMICOLON
  • 3 - SPACE

quoted (Integer)
The text qualifier.
  • 0 - NONE
  • 1 - SINGLE QUOTE
  • 2 - DOUBLE QUOTE
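
A sketch of a CONFIG with autoIdentify disabled, supplying the CSV attributes explicitly (the values shown are illustrative):

{
	"batchKey": "start",
	"isLastBatch": "false",
	"importType": "append",
	"autoIdentify": "false",
	"commentChar": "#",
	"delimiter": 0,
	"quoted": 2
}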

POSSIBLE ERROR CODES

7103, 7104, 7111, 7232, 7248, 7249, 7301, 8046, 8119, 8504, 8506, 8513, 8516

Sample Request:

curl https://analyticsapi.zoho.com/restapi/v2/bulk/workspaces/<workspace-id>/views/<view-id>/data/batch?CONFIG=<encoded_json_value> \
-F 'FILE=@/home/local/import.csv' -X 'POST' \
-H 'ZANALYTICS-ORGID: <org-id>' \
-H 'Authorization: Zoho-oauthtoken <access_token>'
using System;
using System.Collections.Generic;
using ZohoAnalytics;
using System.Text.Json;

namespace ZohoAnalyticsTest
{
    class Program
    {
        long orgId = 55522777;
        long workspaceId = 35130000001055707;
        long viewId = 35730000007354002;

        public void ImportBulkDataAsBatches(IAnalyticsClient ac)
        {
            string importType = "append";
            bool autoIdentify = true;
            string filePath = "C:\\Users\\Administrator\\Downloads\\SalesTable.csv";
            int batchSize = 5000;
            IBulkAPI data = ac.GetBulkInstance(orgId, workspaceId);
            long jobId = data.ImportBulkDataAsBatches(viewId, importType, autoIdentify, filePath, batchSize, null);
            Console.WriteLine(jobId);
        }

        static void Main(string[] args)
        {
            string clientId = "1000.xxxxxxx";
            string clientSecret = "xxxxxxx";
            string refreshToken = "1000.xxxxxxx.xxxxxxx";

            try
            {
                IAnalyticsClient ac = new AnalyticsClient(clientId, clientSecret, refreshToken);
                Program obj = new Program();
                obj.ImportBulkDataAsBatches(ac);
            }
            catch (ServerException ex)
            {
                Console.WriteLine("Server exception - " + ex.GetErrorMessage());
            }
            catch (Exception ex)
            {
                Console.WriteLine("Other exception - " + ex.Message);
            }
        }
    }
}
package main

import (
    "fmt"
    ZAnalytics "zoho/pkg/analyticsclient"
)

var (
    clientId = "1000.xxxxxxx"
    clientSecret = "xxxxxxx"
    refreshToken = "1000.xxxxxxx.xxxxxxx"

    orgId = "55522777"
    workspaceId = "35130000001055707"
    viewId = "35730000007354002"
)

func ImportBulkDataAsBatches(ac ZAnalytics.Client) {
    config := map[string]interface{}{}
    importtype := "truncateadd"
    autoidentify := "true"
    filepath := "/home/local/admin/Files/Sales.csv"
    batchsize := 5000
    bulk := ZAnalytics.GetBulkInstance(&ac, orgId, workspaceId)
    result, err := bulk.ImportBulkDataAsBatches(viewId, importtype, autoidentify, filepath, batchsize, config)

    if err != nil {
        fmt.Println("Error - " + err.ErrorMessage)
    } else {
        fmt.Println(result)
    }
}

func main() {

    ac := ZAnalytics.GetAnalyticsClient(clientId, clientSecret, refreshToken)
    ImportBulkDataAsBatches(ac)

}
import com.zoho.analytics.client.*;
import org.json.*;

public class Test {

    private long orgId = 55522777L;
    private long workspaceId = 35130000001055707L;
    private long viewId = 35130000001066710L;

    public static void main(String args[]){

        String clientId = "1000.xxxxxxx";
        String clientSecret = "xxxxxxx";
        String refreshToken = "1000.xxxxxxx.xxxxxxx";

        Test tObj = new Test();
        AnalyticsClient ac = new AnalyticsClient(clientId, clientSecret, refreshToken);

        try {
            tObj.importDataAsBatches(ac);
        }
        catch (ServerException ex) {
            System.out.println("Server exception - ErrorCode : " + ex.getErrorCode() + ", ErrorMessage : "  + ex.getErrorMessage());
        }
        catch (ParseException ex) {
            System.out.println("Parser exception - ErrorMessage : "  + ex.getResponseMessage());
        }
        catch (Exception ex) {
            System.out.println("Other exception - ");
            ex.printStackTrace();
        }
    }

    public void importDataAsBatches(AnalyticsClient ac) throws Exception {
        String importType = "append";
        boolean autoIdentify = true;
        String filePath = "/home/local/admin/Files/Sales.csv";
        int batchSize = 10000;
        JSONObject config = new JSONObject();
        JSONObject toolConfig = new JSONObject();
        BulkAPI data = ac.getBulkInstance(orgId, workspaceId);
        long jobId = data.importDataAsBatches(viewId, importType, autoIdentify, filePath, batchSize, config, toolConfig);
        System.out.println(jobId);
    }
}
<?php

    require 'AnalyticsClient.php';

    class Test
    {
        public $ac = NULL;
        public $client_id = "1000.xxxxxxx";
        public $client_secret = "xxxxxxx";
        public $refresh_token = "1000.xxxxxxx.xxxxxxx";

        public $org_id = "55522777";
        public $workspace_id = "35130000001055707";
        public $view_id = "31670000001055909";

        function __construct() {
            $this->ac =  new AnalyticsClient($this->client_id, $this->client_secret, $this->refresh_token);
        }

        function importBulkDataAsBatches() {
            $import_type = "truncateadd";
            $auto_identify = "true";
            $file_path = "/home/local/admin/Files/Sales.csv";
            $batch_size = 50000;

            $bulk = $this->ac->getBulkInstance($this->org_id, $this->workspace_id);
            $response = $bulk->importBulkDataAsBatches($this->view_id, $import_type, $auto_identify, $file_path, $batch_size);
            print_r($response);
        }
    }

    $test_obj = new Test();

    try {
        $test_obj->importBulkDataAsBatches();
    }
    catch(ServerException $se) {
        echo "Server exception : " . $se->getErrorMessage() . "\n";
    }
    catch(IOException $ioe) {
        echo "IO exception : " . $ioe->getErrorMessage() . "\n";
    }
    catch(ParseException $pe) {
        echo "Parser exception : " . $pe->getErrorMessage() . "\n";
    }
    catch(Exception $e) {
        echo "Exception : " . $e->getErrorMessage() . "\n";
    }
?>
from __future__ import with_statement
from AnalyticsClient import AnalyticsClient
import sys
import json

class Config:

    CLIENTID = "1000.xxxxxxx"
    CLIENTSECRET = "xxxxxxx"
    REFRESHTOKEN = "1000.xxxxxxx.xxxxxxx"

    ORGID = "55522777"
    WORKSPACEID = "35130000001055707"
    VIEWID = "31670000001055909"

class sample:

    ac = AnalyticsClient(Config.CLIENTID, Config.CLIENTSECRET, Config.REFRESHTOKEN)

    def import_data_as_batches(self, ac):
        import_type = "append"
        auto_identify = "true"
        file_path = "/home/local/admin/Files/Sales.csv"
        bulk = ac.get_bulk_instance(Config.ORGID, Config.WORKSPACEID)
        batch_size = 400
        config = {}
        result = bulk.import_data_as_batches(Config.VIEWID, import_type, auto_identify, file_path, batch_size, config)
        print(result)

try:
    obj = sample()
    obj.import_data_as_batches(obj.ac)

except Exception as e:
    print(str(e))
var nodelib = require('./ZAnalyticsClient');

var clientId = '1000.xxxxxxx';
var clientSecret = 'xxxxxxx';
var refreshtoken = '1000.xxxxxxx.xxxxxxx';
var orgId = '55522777';
var workspaceId = '35130000001055707';
var viewId = '31670000001055909';

var ac = new nodelib.AnalyticsClient(clientId, clientSecret, refreshtoken);
var importType = 'append';
var batchSize = 5000;
var autoIdentify = true;
var filePath = '/home/local/admin/Files/Sales.csv';
var bulk = ac.getBulkInstance(orgId, workspaceId);

bulk.importBulkDataAsBatches(viewId, importType, autoIdentify, filePath, batchSize).then((response) => {
    console.log(response);

}).catch((error) => {
    console.log('error : '+error);
    console.log('errorCode : '+error.errorCode);
    console.log('errorMessage : '+error.errorMessage);
});

Sample value for CONFIG parameter:

{
	"importType": "append",
	"autoIdentify": "true",
	"batchKey": "start",
	"isLastBatch": "true"
}
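
The sample above covers a single-batch upload (isLastBatch is true in the very first request). For a multi-batch upload, the initial request sends batchKey "start" with isLastBatch false, and each subsequent request reuses the batchKey returned by the previous response, for example (the batchKey value here is taken from the sample response below):

{
	"importType": "append",
	"autoIdentify": "true",
	"batchKey": "1694703618038_1767024000008742002",
	"isLastBatch": "true"
}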

Sample Response:

HTTP/1.1 200 OK
Content-Type: application/json;charset=UTF-8

{
	"status": "success",
	"summary": "Create bulk import job",
	"data": {
		"batchKey": "1694703618038_1767024000008742002",
		"jobId": "1767024000008787011"
	}
}
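
Putting the pieces together, below is a minimal sketch of the raw batch workflow using Python's requests library rather than one of the SDKs shown above. The endpoint, headers, and response shape follow the samples on this page; the file list and placeholder credentials are assumptions:

import json
import requests

ENDPOINT = ("https://analyticsapi.zoho.com/restapi/v2/bulk/workspaces/"
            "<workspace-id>/views/<view-id>/data/batch")
HEADERS = {
    "ZANALYTICS-ORGID": "<org-id>",
    "Authorization": "Zoho-oauthtoken <access_token>",
}

batch_files = ["batch1.csv", "batch2.csv", "batch3.csv"]  # pre-split CSV batches
batch_key = "start"  # the first request must use "start"

for index, path in enumerate(batch_files):
    config = {
        "importType": "append",
        "autoIdentify": "true",
        "batchKey": batch_key,
        # Mark the last file so the server finalizes the import job.
        "isLastBatch": "true" if index == len(batch_files) - 1 else "false",
    }
    with open(path, "rb") as f:
        # requests URL-encodes the CONFIG JSON and sends the file as
        # a multipart field named FILE, matching the curl sample above.
        response = requests.post(
            ENDPOINT,
            params={"CONFIG": json.dumps(config)},
            headers=HEADERS,
            files={"FILE": f},
        )
    data = response.json()["data"]
    # Reuse the server-issued batchKey for every subsequent request.
    batch_key = data.get("batchKey", batch_key)

print("Job ID:", data["jobId"])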