WebAPI and OData Functions - asp.net-web-api
I am using a webapi to get data. The API limits results to 100. In this case there are 2481, but because of this limitation I cannot get all of the records needed without using OData functions. I have been told to use the skip function, but the documentation on this isn't very helpful for someone who hasn't used it before.
My call is as follows:
https://accounting.sageone.co.za/api/2.0.0/TaxInvoice/get?apikey={xxxxxxxxxx}&companyid=12345&includeDetail=true&$orderby=Created%20desc
Below is a snippet of the first result returned:
> {
> "TotalResults": 2481,
> "ReturnedResults": 100,
> "Results": [
> {
> "DueDate": "2018-10-31T00:00:00Z",
> "FromDocument": "",
> "AllowOnlinePayment": true,
> "Paid": false,
> "Status": "Unpaid",
> "Locked": false,
> "CustomerId": 3148838,
> "CustomerName": "Cell C Limited",
> "SalesRepresentativeId": 37307,
> "SalesRepresentative": {
> "ID": 37307,
> "FirstName": "David",
> "LastName": "Markman",
> "Name": "David Markman",
> "Active": true,
> "Email": "davidm@infoslipsconnect.com",
> "Mobile": "",
> "Telephone": "",
> "Created": "2018-10-18T12:42:31.233",
> "Modified": "2018-10-18T12:46:49.01"
> },
> "Modified": "2018-10-18T12:46:49.01",
> "Created": "2018-10-18T12:42:31.233",
> "ID": 483959431,
> "Date": "2018-10-18T00:00:00Z",
> "Inclusive": false,
> "DiscountPercentage": 0,
> "TaxReference": "4870194356",
> "DocumentNumber": "INV03357",
> "Reference": "14480 - October Print & Post",
> "Message": "",
> "Discount": 0,
> "Exclusive": 98243.04,
> "Tax": 14736.46,
> "Rounding": 0,
> "Total": 112979.5,
> "AmountDue": 112979.5,
> "PostalAddress01": "Cell C ",
> "PostalAddress02": "Waterfall Campus",
> "PostalAddress03": "C/O Maxwell Drive and Pretoria Road",
> "PostalAddress04": "Buccleuch",
> "PostalAddress05": "",
> "DeliveryAddress01": "September 2018 Print & Post Distribution",
> "DeliveryAddress02": "",
> "DeliveryAddress03": "",
> "DeliveryAddress04": "",
> "DeliveryAddress05": "PO: TBC",
> "Printed": true,
> "Editable": true,
> "HasAttachments": false,
> "HasNotes": false,
> "HasAnticipatedDate": false,
> "Lines": [
> {
> "SelectionId": 4804411,
> "TaxTypeId": 2691481,
> "ID": 328907379,
> "Description": "CELL C PRINT & POST ",
> "LineType": 0,
> "Quantity": 1,
> "UnitPriceExclusive": 0,
> "Unit": "",
> "UnitPriceInclusive": 0,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": 0,
> "Discount": 0,
> "Tax": 0,
> "Total": 0,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926677,
> "TaxTypeId": 2691481,
> "ID": 328907380,
> "Description": "CellC: PDF Compiled",
> "LineType": 0,
> "Quantity": 52926,
> "UnitPriceExclusive": 0.34,
> "Unit": "",
> "UnitPriceInclusive": 0.39,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": 17994.84,
> "Discount": 0,
> "Tax": 2699.23,
> "Total": 20694.07,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926854,
> "TaxTypeId": 2691481,
> "ID": 328907382,
> "Description": "CellC: Rounding Difference",
> "LineType": 0,
> "Quantity": 1,
> "UnitPriceExclusive": -238.16,
> "Unit": "",
> "UnitPriceInclusive": -273.88,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": -238.16,
> "Discount": 0,
> "Tax": -35.72,
> "Total": -273.88,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926687,
> "TaxTypeId": 2691481,
> "ID": 328907383,
> "Description": "CellC: PDF Print",
> "LineType": 0,
> "Quantity": 109782.5,
> "UnitPriceExclusive": 0.73,
> "Unit": "",
> "UnitPriceInclusive": 0.83,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": 80141.23,
> "Discount": 0,
> "Tax": 12021.18,
> "Total": 92162.41,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926854,
> "TaxTypeId": 2691481,
> "ID": 328907384,
> "Description": "CellC: Rounding Difference",
> "LineType": 0,
> "Quantity": 1,
> "UnitPriceExclusive": -87.82,
> "Unit": "",
> "UnitPriceInclusive": -100.99,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": -87.82,
> "Discount": 0,
> "Tax": -13.17,
> "Total": -100.99,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926687,
> "TaxTypeId": 2691481,
> "ID": 328907385,
> "Description": "CellC: Hand Delivery",
> "LineType": 0,
> "Quantity": 2,
> "UnitPriceExclusive": 0.34,
> "Unit": "",
> "UnitPriceInclusive": 0.39,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": 0.68,
> "Discount": 0,
> "Tax": 0.1,
> "Total": 0.78,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926854,
> "TaxTypeId": 2691481,
> "ID": 328907386,
> "Description": "CellC: Rounding Difference",
> "LineType": 0,
> "Quantity": 1,
> "UnitPriceExclusive": -0.01,
> "Unit": "",
> "UnitPriceInclusive": -0.01,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": -0.01,
> "Discount": 0,
> "Tax": 0,
> "Total": -0.01,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> },
> {
> "SelectionId": 12926761,
> "TaxTypeId": 2691481,
> "ID": 328907387,
> "Description": "CellC: Nixie",
> "LineType": 0,
> "Quantity": 1,
> "UnitPriceExclusive": 432.28,
> "Unit": "",
> "UnitPriceInclusive": 497.12,
> "TaxPercentage": 0.15,
> "DiscountPercentage": 0,
> "Exclusive": 432.28,
> "Discount": 0,
> "Tax": 64.84,
> "Total": 497.12,
> "Comments": "",
> "AnalysisCategoryId1": 16041,
> "UnitCost": 0
> }
> ]
> },
I need to do the following: Return all records where a specific field ("Created") contains the current year, even if there are more than 100 results. I assume that the $search and skip functions would need to be used, but I am not sure of the syntax.
Thanks,
Steven
According to the documentation here, you should concatenate the $skip argument into the URL.
Something like:
https://accounting.sageone.co.za/api/2.0.0/TaxInvoice/get?apikey={xxxxxxxxxx}&companyid=12345&includeDetail=true&$skip=100&$orderby=Created%20desc
The example provided was:
https://services.odata.org/OData/OData.svc/Products?$skip=2&$top=2&$orderby=Rating
You may need to fiddle a little but that appears to be the method.
To return all records where a specific field ("Created") contains the current year, you need to append the below filter criteria to the url. You can see the other OData date functions supported here.
&$filter=year(Created) eq 2018
OData handles pagination for services which might return huge amounts of data. As the API limit is 100, which indicates the page size of the data, you will get a maximum of 100 records per service call. To get records from 101 to 200, you need to skip the first 100 by appending $skip=100 to the url. Also, if you need to fetch records from 201 to 300, then append $skip=200 to the url, and so on until you get all 2481 records. That means you need to call the service 25 times by increasing the $skip value.
So with additional filter and skip query, you need to append below query string to existing url
&$filter=year(Created) eq 2018&$skip=100
Also, if you want to fetch fewer than 100 records per service call, then you need to use the $top query. Let's say you have a page size of 50 in your application; then append $top=50 to the query string.
Related
How to parse json with multiple records using pljson
I am having issues parsing this json data there is a total of 2 records in "orders" and I need to retrieve data based on order 1 and order 2 with also of each item in "orders.order_items" Any help would be great, it's stressing me out... { "status": "success", "execution_time": "0.0304 seconds", "total_records": 2, "records_returned": 2, "offset": 0, "limit": 150, "orders": [{ "id": "305954583", "email": "email#gmail.com", "date_added": "2022-03-16 20:42:44", "date_updated": "2022-03-16 20:43:12", "checkout_data": [], "order_metadata": [], "discount_list": [], "order_notes": [], "order_items": [{ "id": "163220786", "name": "099922511015", "price": 5, "quantity": 3, "weight": 0, "code": "099922511015", "delivery_type": "ship", "category_code": "", "fulfillment_method": "", "variation_list": [], "metadata": [] }], "order_shipments": [] }, { "id": "170951391", "email": "email2#gmail.com", "date_added": "2021-04-27 22:50:11", "date_updated": "2022-03-17 02:38:43", "checkout_data": [], "order_metadata": [], "discount_list": [], "order_notes": [{ "date_added": "2022-03-17 02:38:43", "username": "username", "content": "testing notes" }], "order_items": [{ "id": "112184373", "name": "COUNTER", "price": 1, "quantity": 1, "weight": 0.25, "code": "COUNTER", "delivery_type": "ship", "category_code": "", "fulfillment_method": "", "variation_list": [], "metadata": [] }], "order_shipments": [] }] } Currently, this is how I have it for i in 1..2 loop dbms_output.put_line('Order #: '||json_ext.get_string(l_json, 'orders['||i||'].id')); temp := json_list(l_json.get('orders['||i||'].order_items')); dbms_output.put_line(temp.get_string); end loop;
You can use: DECLARE obj pljson := pljson( '{ "status": "success", "execution_time": "0.0304 seconds", "total_records": 2, "records_returned": 2, "offset": 0, "limit": 150, "orders": [{ "id": "305954583", "email": "email#gmail.com", "date_added": "2022-03-16 20:42:44", "date_updated": "2022-03-16 20:43:12", "checkout_data": [], "order_metadata": [], "discount_list": [], "order_notes": [], "order_items": [{ "id": "163220786", "name": "099922511015", "price": 5, "quantity": 3, "weight": 0, "code": "099922511015", "delivery_type": "ship", "category_code": "", "fulfillment_method": "", "variation_list": [], "metadata": [] }], "order_shipments": [] }, { "id": "170951391", "email": "email2#gmail.com", "date_added": "2021-04-27 22:50:11", "date_updated": "2022-03-17 02:38:43", "checkout_data": [], "order_metadata": [], "discount_list": [], "order_notes": [{ "date_added": "2022-03-17 02:38:43", "username": "username", "content": "testing notes" }], "order_items": [{ "id": "112184373", "name": "COUNTER", "price": 1, "quantity": 1, "weight": 0.25, "code": "COUNTER", "delivery_type": "ship", "category_code": "", "fulfillment_method": "", "variation_list": [], "metadata": [] }], "order_shipments": [] }] }' ); v_order_list pljson_list; v_order pljson; v_order_items pljson_list; v_item pljson; BEGIN v_order_list := pljson_ext.get_json_list( obj, 'orders'); FOR i IN 1 .. v_order_list.COUNT() LOOP v_order := TREAT(v_order_list.get(i).get_element() AS pljson); DBMS_OUTPUT.PUT_LINE('Order id: ' || v_order.get_string('id')); v_order_items := TREAT(v_order.get('order_items').get_element() AS pljson_list); FOR j IN 1 .. v_order_items.COUNT() LOOP v_item := TREAT(v_order_items.get(j).get_element() AS pljson); DBMS_OUTPUT.PUT_LINE(' Order item id: ' || v_item.get_string('id')); END LOOP; END LOOP; END; / Which outputs: Order id: 305954583 Order item id: 163220786 Order id: 170951391 Order item id: 112184373 db<>fiddle here
Seaweedfs Delete file succeds but existing filer still holds it
We use seaweedfs 1.78 When i use grpc delete a file via filer. curl -X DELETE http://filer1:9889/dataset/qiantao/1.txt It return success. Because I have 10 filer. after delete! curl -H "Accept: application/json" "http://filer2:9889/dataset/qiantao/?pretty=y" |grep qiantao |grep txt % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 100 15723 0 15723 0 0 1917k 0 --:--:-- --:--:-- --:--:-- 2193k "FullPath": "/dataset/qiantao/1.txt", If I start a new filer. It can not got /dataset/qiantao/1.txt; It looks perfect!!!! But in exist filers. Filer get file info below. curl -H "Accept: application/json" "http://filer1:9889/dataset/qiantao/?pretty=y&limit=1" { "Path": "/dataset/qiantao", "Entries": [ { "FullPath": "/dataset/qiantao/1.txt", "Mtime": "2020-12-07T11:15:59+08:00", "Crtime": "2020-12-07T11:15:59+08:00", "Mode": 432, "Uid": 0, "Gid": 0, "Mime": "text/plain", "Replication": "010", "Collection": "", "TtlSec": 0, "UserName": "", "GroupNames": null, "SymlinkTarget": "", "Md5": null, "Extended": null, "chunks": [ { "file_id": "4328,587fb084df9f9dbf", "size": 2, "mtime": 1607310959158810676, "e_tag": "c7c83966", "fid": { "volume_id": 4328, "file_key": 1484763268, "cookie": 3751779775 } } ] } ], "Limit": 1, "LastFileName": "1.txt", "ShouldDisplayLoadMore": true Get volume info below. { "Id": 4328, "Size": 31492542356, "ReplicaPlacement": { "SameRackCount": 0, "DiffRackCount": 1, "DiffDataCenterCount": 0 }, "Ttl": { "Count": 0, "Unit": 0 }, "Collection": "", "Version": 3, "FileCount": 111030, "DeleteCount": 709, "DeletedByteCount": 1628822733, "ReadOnly": false, "CompactRevision": 0, "ModifiedAtSecond": 0, "RemoteStorageName": "", "RemoteStorageKey": "" }, So download 4328.idx from volume server. and use see_idx lookup it. 
./see_idx -dir /Users/qiantao/Documents/seaweedfs -volumeId=4328 -v=4 |grep 587fb084 key:587fb084 offset:2802901546 size:57 key:587fb084 offset:3937021600 size:4294967295 It looks like key:587fb084 has been overwritten with a new entry? So how can I fix this problem to make it appear normal?
4294967295 is a tombstone, marking the entry has been deleted.
_update_by_query fails to update all documents in ElasticSearch
I have over 30 million documents in Elasticsearch (version - 6.3.3), I am trying to add new field to all existing documents and setting the value to 0. For example: I want to add start field which does not exists previously in Twitter document, and set it's initial value to 0, in all 30 million documents. In my case I was able to update 4 million only. If I try to check the submitted task with TASK API http://localhost:9200/_task/{taskId}, result from says something like -> { "completed": false, "task": { "node": "Jsecb8kBSdKLC47Q28O6Pg", "id": 5968304, "type": "transport", "action": "indices:data/write/update/byquery", "status": { "total": 34002005, "updated": 3618000, "created": 0, "deleted": 0, "batches": 3619, "version_conflicts": 0, "noops": 0, "retries": { "bulk": 0, "search": 0 }, "throttled_millis": 0, "requests_per_second": -1.0, "throttled_until_millis": 0 }, "description": "update-by-query [Twitter][tweet] updated with Script{type=inline, lang='painless', idOrCode='ctx._source.Twitter.start = 0;', options={}, params={}}", "start_time_in_millis": 1574677050104, "running_time_in_nanos": 466805438290, "cancellable": true, "headers": {} } } The query I am executing against ES , is something like: curl -XPOST "http://localhost:9200/_update_by_query?wait_for_completion=false&conflicts=proceed" -H 'Content-Type: application/json' -d' { "script": { "source": "ctx._source.Twitter.start = 0;" }, "query": { "exists": { "field": "Twitter" } } }' Any suggestions would be great, thanks
Filter objects using multiple conditions including comparing two object fields
Have a list of JSON object and am trying to filter them based on minimum value check and a comparison between two of its fields. { "preview": false, "init_offset": 0, "messages": [], "fields": [ { "name": "A" }, { "name": "B" }, { "name": "Diff" }, { "name": "Threshold" } ], "results": [ { "A": "foo", "B": "bar", "Diff": "1095", "Threshold": "1200" }, { "A": "baz", "B": "bux", "Diff": "81793", "Threshold": "0" }, { "A": "quux", "B": "quuz", "Diff": "194" }, { "A": "moo", "B": "goo", "Diff": "5000", "Threshold": "2000" } ] } Closest I've come to is .results | map(.Threshold //= "0") | .[] | select((.Threshold|tonumber > 0) and (.Diff|tonumber > .Threshold|tonumber)) But that gives a Cannot index string with string "Threshold" error. Basically I want to return all results where Diff is greater than a non-zero Threshold. So in this case: { "A": "moo", "B": "goo", "Diff": "5000", "Threshold": "2000" } Using jq 1.5 FWIW.
You're just missing some parentheses. Compare: select((.Threshold|tonumber) > 0 and (.Diff|tonumber) > (.Threshold|tonumber)) Or avoiding the double-conversion: select( (.Threshold|tonumber) as $t | $t > 0 and (.Diff|tonumber) > $t ) You could also simplify the entire program a bit: .results[] | select( ((.Threshold // 0) | tonumber) as $t | $t > 0 and (.Diff|tonumber) > $t )
How can I resolve the increase in index size when using nested objects in elasticsearch?
The total number of data is 1 billion. When I configure an index by setting some fields of data as nested objects, the number of data increases and the index size increases. There are about 20 nested objects in a document. When I index 1 billion documents, the number of indexes is 20 billion, and the index size is about 20TB. However, when I remove nested objects, the number of indexes is 1 billion, and the index size is about 5TB. It's simply removed nested object and can not provide services with this index structure. I know why nested objects have a higher index count than a simple object configuration. But I ask why the index is four times larger and how to fix it. version of elasticsearch : 5.1.1 The Sample Data is as follows: nested object Mapping : idds, ishs, resources, versions { "fileType": { "asdFormat": 1 }, "dac": { "pe": { "cal": { "d1": -4634692645508395000, "d2": -5805223225419042000, "d3": -1705264433 }, "bytes": "6a7068e0", "entry": 0, "count": 7, "css": { "idh": 0, "ish": 0, "ifh": 0, "ioh": 0, "ish": 0, "ied": 0, "exp": 0, "imp": 0, "sec": 0 }, "ff": { "field1": 23117, "field2": 144, "field3": 3, "field4": 0, "field5": 4, "field6": 0, "field7": 65535, "field8": 0, "field9": 184, "field10": 0, "field11": 0, "field12": 0, "field13": 64, "field14": 0, "field15": 40104, "field16": 64563, "field17": 0, "field18": 0, "field19": 0, "field20": 0, "field21": 0, "field22": 0, "field23": 0, "field24": 0, "field25": 0, "field26": 0, "field27": 0, "field28": 0, "field29": 0, "field30": 0, "field31": 224 }, "ifh": { "mc": 332, "nos": 3, "time": 1091599505, "ps": 0, "ns": 0, "soh": 224, "chart": 271 }, "ioh": { "magic": 267, "mlv": 7, "nlv": 10, "soc": 80384, "soid": 137216, "soud": 0, "aep": 70290, "boc": 4096, "bod": 86016, "aib": "16777216", "si": 4096, "fa": 512, "mosv": 5, "nosv": 1, "miv": 5, "niv": 1, "msv": 4, "nsv": 0, "wv": 0, "si": 262144, "sh": 1024, "cs": 0, "ss": 2, "dllchart": 32768, "ssr": "262144", "ssc": "262144", "ssh": "1048576", "shc": 
"4096", "lf": 0, "nor": 16 }, "idds": [ { "id": 1, "address": 77504, "size": 300 }, { "id": 2, "address": 106496, "size": 134960 }, { "id": 6, "address": 5264, "size": 28 }, { "id": 11, "address": 592, "size": 300 }, { "id": 12, "address": 4096, "size": 1156 } ], "ishs": [ { "id": 0, "name": ".text", "size": 79920, "address": 4096, "srd": 80384, "ptr": 1024, "ptrl": 0, "ptl": 0, "nor": 0, "nol": 0, "chart": 3758096480, "ex1": 60404022, "ex2": 61903965, "ex": 61153993.5 }, { "id": 1, "name": ".data", "size": 17884, "address": 86016, "srd": 2048, "ptr": 81408, "ptrl": 0, "ptl": 0, "nor": 0, "nol": 0, "chart": 3221225536, "ex1": 27817394, "ex2": -1, "ex": 27817394 }, { "id": 2, "name": ".rsrc", "size": 155648, "address": 106496, "srd": 135680, "ptr": 83456, "ptrl": 0, "ptl": 0, "nor": 0, "nol": 0, "chart": 3758096448, "ex1": 38215005, "ex2": 46960547, "ex": 42587776 } ], "resources": [ { "id": 2, "count": 3, "hash": 658696779440676200 }, { "id": 3, "count": 14, "hash": 4671329014159995000 }, { "id": 5, "count": 30, "hash": -6413921454731808000 }, { "id": 6, "count": 17, "hash": 8148183923057157000 }, { "id": 14, "count": 4, "hash": 8004262029246967000 }, { "id": 16, "count": 1, "hash": 7310592488525726000 }, { "id": 2147487240, "count": 2, "hash": -7466967570237519000 } ], "upx": { "path": "xps", "d64": 3570326159822345700 }, "versions": [ { "language": 1042, "codePage": 1200, "companyName": "Microsoft Corporation", "fileDescription": "Files and Settings Transfer Wizard", "fileVersion": "5.1.2600.2180 (xpsp_sp2_rtm.040803-2158)", "internalName": "Microsoft", "legalCopyright": "Copyright (C) Microsoft Corp. 
1999-2000", "originalFileName": "calc.exe", "productName": "Microsoft(R) Windows (R) 2000 Operating System", "productVersion": "5.1.2600.2180" } ], "import": { "dll": [ "GDI32.dll", "KERNEL32.dll", "USER32.dll", "ole32.dll", "ADVAPI32.dll", "COMCTL32.dll", "SHELL32.dll", "msvcrt.dll", "comdlg32.dll", "SHLWAPI.dll", "SETUPAPI.dll", "Cabinet.dll", "LOG.dll", "MIGISM.dll" ], "count": 14, "d1": -149422985349905340, "d2": -5344971616648705000, "d3": 947564411044974800 }, "ddSec0": { "d1": -3007779250746558000, "d4": -2515772085422514700 }, "ddSec2": { "d2": -4422408392580008000, "d4": -8199520081862749000 }, "ddSec3": { "d1": -8199520081862749000 }, "cdp": { "d1": 787971, "d2": 39, "d3": 101980696, "d4": 3, "d5": 285349133 }, "cde": { "d1": 67242500, "d2": 33687042, "d3": 218303490, "d4": 1663632132, "d5": 0 }, "cdm": { "d1": 319293444, "d2": 2819, "d3": 168364553, "d4": 50467081, "d5": 198664 }, "cdb": { "d1": 0, "d2": 0, "d3": 0, "d4": 0, "d5": 0 }, "mm": { "d0": -3545367393134139000, "d1": 1008464166428372900, "d2": -6313842304565328000, "d3": -5015640502060250000 }, "ser": 17744, "ideal": 0, "map": 130, "ol": 0 } }, "fileSize": 219136 }