Kafka to Elasticsearch connector not able to start - elasticsearch

Elasticsearch -> elasticsearch-7.6.0
I am trying to move data from a Kafka topic to Elasticsearch using the Elasticsearch Sink connector.
But starting the connector shows the error below:
2022-05-28 09:39:27,864] ERROR [SINK_ELASTIC_TEST_01|task-0] Failed to create mapping for index users.user_details3 with schema Schema{STRING} due to 'ElasticsearchStatusException[Elasticsearch exception [type=mapper_parsing_exception, reason=Root mapping definition has unsupported parameters: [type : text] [fields : {keyword={ignore_above=256, type=keyword}}]]]' after 6 attempt(s) (io.confluent.connect.elasticsearch.RetryUtil:164)
ElasticsearchStatusException[Elasticsearch exception [type=mapper_parsing_exception, reason=Root mapping definition has unsupported parameters: [type : text] [fields : {keyword={ignore_above=256, type=keyword}}]]]
at org.elasticsearch.rest.BytesRestResponse.errorFromXContent(BytesRestResponse.java:178)
at org.elasticsearch.client.RestHighLevelClient.parseEntity(RestHighLevelClient.java:2484)
at org.elasticsearch.client.RestHighLevelClient.parseResponseException(RestHighLevelClient.java:2461)
at org.elasticsearch.client.RestHighLevelClient.internalPerformRequest(RestHighLevelClient.java:2184)
at org.elasticsearch.client.RestHighLevelClient.performRequest(RestHighLevelClient.java:2154)
at org.elasticsearch.client.RestHighLevelClient.performRequestAndParseEntity(RestHighLevelClient.java:2118)
at org.elasticsearch.client.IndicesClient.putMapping(IndicesClient.java:440)
at io.confluent.connect.elasticsearch.ElasticsearchClient.lambda$createMapping$3(ElasticsearchClient.java:238)
at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:158)
at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:119)
at io.confluent.connect.elasticsearch.ElasticsearchClient.callWithRetries(ElasticsearchClient.java:426)
at io.confluent.connect.elasticsearch.ElasticsearchClient.createMapping(ElasticsearchClient.java:236)
at io.confluent.connect.elasticsearch.ElasticsearchSinkTask.checkMapping(ElasticsearchSinkTask.java:151)
at io.confluent.connect.elasticsearch.ElasticsearchSinkTask.tryWriteRecord(ElasticsearchSinkTask.java:294)
at io.confluent.connect.elasticsearch.ElasticsearchSinkTask.put(ElasticsearchSinkTask.java:118)
at org.apache.kafka.connect.runtime.WorkerSinkTask.deliverMessages(WorkerSinkTask.java:584)
at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:334)
at org.apache.kafka.connect.runtime.WorkerSinkTask.iteration(WorkerSinkTask.java:235)
at org.apache.kafka.connect.runtime.WorkerSinkTask.execute(WorkerSinkTask.java:204)
at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:200)
at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:255)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
Suppressed: org.elasticsearch.client.ResponseException: method [PUT], host [http://localhost:9200], URI [/users.user_details3/_mapping?master_timeout=30s&timeout=30s], status line [HTTP/1.1 400 Bad Request]
Topic data
rowtime: 2022/05/27 13:46:48.136 Z, key: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D642000000022B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D642F23DB1FBE13668180004"}}"}, value: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D642000000022B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D642F23DB1FBE13668180004"}, "operationType": "insert", "clusterTime": {"$timestamp": {"t": 1653659202, "i": 2}}, "fullDocument": {"_id": {"$oid": "6290d642f23db1fbe1366818"}, "userid": 1.0, "name": "Gaurav"}, "ns": {"db": "users", "coll": "user_details3"}, "documentKey": {"_id": {"$oid": "6290d642f23db1fbe1366818"}}}"}, partition: 0
rowtime: 2022/05/27 13:47:06.142 Z, key: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D654000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D654F23DB1FBE13668190004"}}"}, value: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D654000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D654F23DB1FBE13668190004"}, "operationType": "insert", "clusterTime": {"$timestamp": {"t": 1653659220, "i": 1}}, "fullDocument": {"_id": {"$oid": "6290d654f23db1fbe1366819"}, "userid": 1.0, "name": "Gaurav"}, "ns": {"db": "users", "coll": "user_details3"}, "documentKey": {"_id": {"$oid": "6290d654f23db1fbe1366819"}}}"}, partition: 0
rowtime: 2022/05/27 13:47:24.149 Z, key: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D668000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D668F23DB1FBE136681A0004"}}"}, value: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D668000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D668F23DB1FBE136681A0004"}, "operationType": "insert", "clusterTime": {"$timestamp": {"t": 1653659240, "i": 1}}, "fullDocument": {"_id": {"$oid": "6290d668f23db1fbe136681a"}, "userid": 1.0, "name": "Gaurav"}, "ns": {"db": "users", "coll": "user_details3"}, "documentKey": {"_id": {"$oid": "6290d668f23db1fbe136681a"}}}"}, partition: 0
rowtime: 2022/05/27 13:48:00.156 Z, key: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D68A000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D68AF23DB1FBE136681B0004"}}"}, value: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D68A000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D68AF23DB1FBE136681B0004"}, "operationType": "insert", "clusterTime": {"$timestamp": {"t": 1653659274, "i": 1}}, "fullDocument": {"_id": {"$oid": "6290d68af23db1fbe136681b"}, "userid": 1.0, "name": "Gaurav"}, "ns": {"db": "users", "coll": "user_details3"}, "documentKey": {"_id": {"$oid": "6290d68af23db1fbe136681b"}}}"}, partition: 0
rowtime: 2022/05/27 13:50:00.182 Z, key: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D706000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D706F23DB1FBE136681C0004"}}"}, value: {"schema":{"type":"string","optional":false},"payload":"{"_id": {"_data": "826290D706000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D706F23DB1FBE136681C0004"}, "operationType": "insert", "clusterTime": {"$timestamp": {"t": 1653659398, "i": 1}}, "fullDocument": {"_id": {"$oid": "6290d706f23db1fbe136681c"}, "userid": 2.0, "name": "Gaurav"}, "ns": {"db": "users", "coll": "user_details3"}, "documentKey": {"_id": {"$oid": "6290d706f23db1fbe136681c"}}}"}, partition: 0
Elasticsearch Sink connector configuration
{
  "name": "SINK_ELASTIC_TEST_01",
  "config": {
    "type.name": "kafka-connect",
    "name": "SINK_ELASTIC_TEST_01",
    "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "value.converter": "org.apache.kafka.connect.json.JsonConverter",
    "errors.log.enable": "true",
    "errors.log.include.messages": "true",
    "topics": "users.user_details3",
    "connection.url": "http://localhost:9200",
    "connection.username": "",
    "connection.password": "",
    "key.ignore": "true",
    "schema.ignore": "false"
  }
}
Note that the topic is created by a MongoDB source connector, so at a high level I am trying to achieve this:
MongoDB --> MongoDBSourceConnector --> Kafka --> ElasticSearchSinkConnector --> ElasticSearch
Please find the MongoDBSourceConnector configuration below for reference:
{
  "name": "source-mongodb-kafka-stream",
  "config": {
    "name": "source-mongodb-kafka-stream",
    "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
    "tasks.max": "1",
    "key.converter": "org.apache.kafka.connect.json.JsonConverter",
    "value.converter": "org.apache.kafka.connect.json.JsonConverter",
    "connection.uri": "mongodb://localhost:27017",
    "database": "users",
    "collection": "",
    "topic.prefix": ""
  }
}
Update 06-03-2022, after OneCricketeer's comments
I have created an index in Elasticsearch with the same name as the topic:
PUT /users_generic/_doc/1
{
  "id": {
    "_data": "826290D706000000012B022C0100296E5A100429BEBD4C4B7F4C0BA80B881299B5FF9246645F696400646290D706F23DB1FBE136681C0004"
  },
  "operationType": "insert",
  "clusterTime": {
    "$timestamp": {
      "t": 1653659398,
      "i": 1
    }
  },
  "fullDocument": {
    "id": {
      "$oid": "6290d706f23db1fbe136681c"
    },
    "userid": 2.0,
    "name": "Gaurav"
  },
  "ns": {
    "db": "users",
    "coll": "user_details3"
  },
  "documentKey": {
    "id": {
      "$oid": "6290d706f23db1fbe136681c"
    }
  }
}
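For reference, the mapping that Elasticsearch generated from this document can be checked with (index name taken from the PUT above):
GET /users_generic/_mapping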
With the above step the mapping was created, but when I executed the connector again with the same parameters as above, it threw a different error:
2022-06-03 12:39:16,691] ERROR [SINK_ELASTIC_TEST_01|task-0] Failed to execute bulk request due to 'org.elasticsearch.common.compress.NotXContentException: Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes' after 6 attempt(s) (io.confluent.connect.elasticsearch.RetryUtil:164)
org.elasticsearch.common.compress.NotXContentException: Compressor detection can only be called on some xcontent bytes

Related

Nifi - Route the JSON based on the Array Name

I am new to NiFi. I have a requirement where we get multiple JSON inputs with different header names, and I have to parse the JSON and insert into different tables based on the header value.
I am not sure how to use the RouteOnContent processor or the EvaluateJsonPath processor.
Input 1
{
"Location": [
{
"country": "US",
"division": "Central",
"region": "Big South",
"locationID": 1015,
"location_name": "Hattiesburg, MS (XF)",
"location_type": "RETAIL",
"location_sub_type": "COS",
"store_type": "",
"planned_open_date": "",
"planned_close_date": "",
"actual_open_date": "2017-07-26",
"actual_close_date": "",
"new_store_flag": "",
"address1": "2100 Lincoln Road",
"address2": "",
"city": "Hattiesburg",
"state": "MS",
"zip": 39402,
"include_for_planning": "Y"
},
{
"country": "US",
"division": "Central",
"region": "Big South",
"locationID": 1028,
"location_name": "Laurel, MS",
"location_type": "RETAIL",
"location_sub_type": "COS",
"store_type": "",
"planned_open_date": "",
"planned_close_date": "",
"actual_open_date": "",
"actual_close_date": "",
"new_store_flag": "",
"address1": "1225 5th street",
"address2": "",
"city": "Laurel",
"state": "MS",
"zip": 39440,
"include_for_planning": "Y"
}
]
}
Input 2
{
"Item": [
{
"npi_code": "NEW",
"cifa_category": "XM",
"o9_category": "Accessories"
},
{
"npi_code": "NEW",
"cifa_category": "XM0",
"o9_category": "Accessories"
}
]
}
Use the website https://jsonpath.com/ to figure out the proper JSONPath expression. What you could potentially do is: if the payload contains $.npi_code then do X, and if it contains $.country then do Y.
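For example, with the two sample inputs above, JSONPath expressions along these lines (a sketch; the paths are taken from the samples) would each match only one of the payloads, so the extracted attribute can drive the routing:
$.Location[0].country   (present only in Input 1, the "Location" payload)
$.Item[0].npi_code      (present only in Input 2, the "Item" payload)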

API Platform 3 does not return null data in response body

In API Platform 2.7 the response body contained fields that had a null value; after upgrading to API Platform 3, the response body does not contain fields that have a null value. Is this deliberate, or due to a configuration change I need to make?
I executed the same call from the api/docs page for 2.7 and 3.0 and expected the results to be the same.
API Platform 2.7 response for class Patient
{
"#context": "/api/contexts/Patient",
"#id": "/api/patients/8110",
"#type": "Patient",
"account": "/api/accounts/8110",
"accountId": 8110,
"isDependent": false,
"isGuardian": false,
"organization": "/api/organizations/765",
"email": null,
"title": null,
"firstName": "CA",
"middleInitial": "A",
"lastName": "Patient",
"suffix": null,
"photoMedia": null,
"sex": "male",
"birthDate": "2000-01-01T00:00:00+00:00",
"addressLineOne": "5759 Witting Corners",
"addressLineTwo": null,
"city": "Marvintown",
"zipCode": "35507",
"state": {
"#id": "/api/states/681",
"#type": "State",
"abbreviation": "CA"
},
"phoneNumber": "617-491-0000",
"mobileNumber": null,
"workNumber": null
}
API Platform 3.0 response for class Patient
{
"#context": "/api/contexts/Patient",
"#id": "/api/patients/8110",
"#type": "Patient",
"account": "/api/accounts/8110",
"accountId": 8110,
"isDependent": false,
"isGuardian": false,
"organization": "/api/organizations/765",
"firstName": "CA",
"middleInitial": "A",
"lastName": "Patient",
"sex": "male",
"birthDate": "2000-01-01T00:00:00+00:00",
"addressLineOne": "5759 Witting Corners",
"city": "Marvintown",
"zipCode": "35507",
"state": {
"#id": "/api/states/681",
"#type": "State",
"abbreviation": "CA"
},
"phoneNumber": "617-491-0000",
}
Relates to this question
You only need to set skip_null_values to false in your normalization_context, because starting with 3.0.0 beta 1 it is true by default.
If you need it to be false for all resources, you can set the default in your config/packages/api_platform.yaml:
api_platform:
    ...
    defaults:
        normalization_context:
            skip_null_values: false
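If you only want this for a single resource instead, the same option can be set in that resource's normalization context; a minimal sketch using PHP attributes (the Patient class is taken from the question, the rest is illustrative):
use ApiPlatform\Metadata\ApiResource;

#[ApiResource(normalizationContext: ['skip_null_values' => false])]
class Patient
{
    // ...
}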

How to parse json with multiple records using pljson

I am having issues parsing this JSON data. There are a total of 2 records in "orders", and I need to retrieve data for order 1 and order 2, along with each item in "orders.order_items".
Any help would be great; it's stressing me out...
{
"status": "success",
"execution_time": "0.0304 seconds",
"total_records": 2,
"records_returned": 2,
"offset": 0,
"limit": 150,
"orders": [{
"id": "305954583",
"email": "email#gmail.com",
"date_added": "2022-03-16 20:42:44",
"date_updated": "2022-03-16 20:43:12",
"checkout_data": [],
"order_metadata": [],
"discount_list": [],
"order_notes": [],
"order_items": [{
"id": "163220786",
"name": "099922511015",
"price": 5,
"quantity": 3,
"weight": 0,
"code": "099922511015",
"delivery_type": "ship",
"category_code": "",
"fulfillment_method": "",
"variation_list": [],
"metadata": []
}],
"order_shipments": []
}, {
"id": "170951391",
"email": "email2#gmail.com",
"date_added": "2021-04-27 22:50:11",
"date_updated": "2022-03-17 02:38:43",
"checkout_data": [],
"order_metadata": [],
"discount_list": [],
"order_notes": [{
"date_added": "2022-03-17 02:38:43",
"username": "username",
"content": "testing notes"
}],
"order_items": [{
"id": "112184373",
"name": "COUNTER",
"price": 1,
"quantity": 1,
"weight": 0.25,
"code": "COUNTER",
"delivery_type": "ship",
"category_code": "",
"fulfillment_method": "",
"variation_list": [],
"metadata": []
}],
"order_shipments": []
}]
}
Currently, this is how I have it
for i in 1..2 loop
    dbms_output.put_line('Order #: '||json_ext.get_string(l_json, 'orders['||i||'].id'));
    temp := json_list(l_json.get('orders['||i||'].order_items'));
    dbms_output.put_line(temp.get_string);
end loop;
You can use:
DECLARE
obj pljson := pljson(
'{
"status": "success",
"execution_time": "0.0304 seconds",
"total_records": 2,
"records_returned": 2,
"offset": 0,
"limit": 150,
"orders": [{
"id": "305954583",
"email": "email#gmail.com",
"date_added": "2022-03-16 20:42:44",
"date_updated": "2022-03-16 20:43:12",
"checkout_data": [],
"order_metadata": [],
"discount_list": [],
"order_notes": [],
"order_items": [{
"id": "163220786",
"name": "099922511015",
"price": 5,
"quantity": 3,
"weight": 0,
"code": "099922511015",
"delivery_type": "ship",
"category_code": "",
"fulfillment_method": "",
"variation_list": [],
"metadata": []
}],
"order_shipments": []
}, {
"id": "170951391",
"email": "email2#gmail.com",
"date_added": "2021-04-27 22:50:11",
"date_updated": "2022-03-17 02:38:43",
"checkout_data": [],
"order_metadata": [],
"discount_list": [],
"order_notes": [{
"date_added": "2022-03-17 02:38:43",
"username": "username",
"content": "testing notes"
}],
"order_items": [{
"id": "112184373",
"name": "COUNTER",
"price": 1,
"quantity": 1,
"weight": 0.25,
"code": "COUNTER",
"delivery_type": "ship",
"category_code": "",
"fulfillment_method": "",
"variation_list": [],
"metadata": []
}],
"order_shipments": []
}]
}'
);
v_order_list pljson_list;
v_order pljson;
v_order_items pljson_list;
v_item pljson;
BEGIN
    v_order_list := pljson_ext.get_json_list( obj, 'orders');
    FOR i IN 1 .. v_order_list.COUNT() LOOP
        v_order := TREAT(v_order_list.get(i).get_element() AS pljson);
        DBMS_OUTPUT.PUT_LINE('Order id: ' || v_order.get_string('id'));
        v_order_items := TREAT(v_order.get('order_items').get_element() AS pljson_list);
        FOR j IN 1 .. v_order_items.COUNT() LOOP
            v_item := TREAT(v_order_items.get(j).get_element() AS pljson);
            DBMS_OUTPUT.PUT_LINE(' Order item id: ' || v_item.get_string('id'));
        END LOOP;
    END LOOP;
END;
/
Which outputs:
Order id: 305954583
Order item id: 163220786
Order id: 170951391
Order item id: 112184373
db<>fiddle here

Elasticsearch fails to map imported data

I have managed to create an import from Kafka to Elasticsearch using Kafka Connect.
Connector-config:
{
  "name": "raw-customer-equipment",
  "config": {
    "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
    "tasks.max": 1,
    "topics": "raw.customer.equipment",
    "key.ignore": true,
    "value.converter.schemas.enable": false,
    "schema.ignore": true,
    "value.converter": "org.apache.kafka.connect.json.JsonConverter",
    "connection.url": "<elastic-url>",
    "connection.username": "<user>",
    "connection.password": "<pwd>",
    "type.name": "_doc"
  }
}
However, Elasticsearch doesn't seem to be able to map the imported (JSON) data. When peeking at it in Kibana, the imported data doesn't seem to be searchable.
{
"_index": "raw.customer.equipment",
"_type": "_doc",
"_id": "raw.customer.equipment+1+929943",
"_version": 1,
"_score": 0,
"_source": {
"ifstats_list": [
{
"Event Time": "1589212678436",
"AP_list": [
{
"AP ID": 1,
"AP Alias": "PRIV0"
},
{
"AP ID": 2,
"AP Alias": "VID1"
},
{
"AP ID": 5,
"AP Alias": "VID1_BH"
}
],
"Device Type": "<type>",
...
"Associated Stations": [
{
"Packets sent": 11056613,
"Packets received": 304744,
"Multiple Retries Count": 0,
"Channel STA": 6,
"MAC Address": "<mac>",
....
},
{
....
}]
....
I want to be able to query by, for instance, "MAC Address", but Elastic seems to just handle the imported data as one big text chunk.
I guess something in the Kafka connector setup is missing or wrong, but I fail to see what.
As you might have guessed I am new to Elastic, and I am not the one who is supposed to use the data in the end.
Any help appreciated.
BR
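For illustration, this is the kind of query I would eventually like to run (a sketch; the field path is assumed from the document above):
GET raw.customer.equipment/_search
{
  "query": {
    "match": {
      "ifstats_list.Associated Stations.MAC Address": "<mac>"
    }
  }
}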
Edit:
Added connector-config by request.

swift get currencycode from country code

I found the accepted answer for getting the currency code from a country code in Objective-C here: How to get ISOCurrencyCode from a ISOCountryCode in iphone sdk?
Can someone help with the Swift code to do the same?
let components: [String: String] = [NSLocaleCountryCode: "CA"]
let identifier = NSLocale.localeIdentifierFromComponents(components)
let locale = NSLocale(localeIdentifier: identifier)
let currencyCode = locale.objectForKey(NSLocaleCurrencyCode)
print(currencyCode)
public static func convertStringCurrencyToNumber(strCurrency: String, locale: String) -> Double {
    var formatter = NSNumberFormatter()
    formatter.currencyCode = locale
    formatter.numberStyle = NSNumberFormatterStyle.CurrencyStyle
    if let converted: Double = formatter.numberFromString(strCurrency)?.doubleValue {
        return converted
    } else {
        return 0.0
    }
}
Changing formatter.locale to formatter.currencyCode works as expected with the ISO 4217 currency code "USD".
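The snippets above use pre-Swift 3 API names. A minimal sketch of the same currency-code lookup in current Swift (assuming Foundation; "CA" is just an example country code):
import Foundation

let components = [NSLocale.Key.countryCode.rawValue: "CA"]
let identifier = Locale.identifier(fromComponents: components)
let locale = Locale(identifier: identifier)
// currencyCode is optional and nil when the locale has no associated currency
print(locale.currencyCode ?? "unknown")   // prints "CAD"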
Use the function below to get the country phone code:
//MARK:- GET COUNTRY CODE
func getCountryPhonceCode (_ country : String) -> String
{
let x : [String] = ["972", "IL",
"93" , "AF",
"355", "AL",
"213", "DZ",
"1" , "AS",
"376", "AD",
"244", "AO",
"1" , "AI",
"1" , "AG",
"54" , "AR",
"374", "AM",
"297", "AW",
"61" , "AU",
"43" , "AT",
"994", "AZ",
"1" , "BS",
"973", "BH",
"880", "BD",
"1" , "BB",
"375", "BY",
"32" , "BE",
"501", "BZ",
"229", "BJ",
"1" , "BM",
"975", "BT",
"387", "BA",
"267", "BW",
"55" , "BR",
"246", "IO",
"359", "BG",
"226", "BF",
"257", "BI",
"855", "KH",
"237", "CM",
"1" , "CA",
"238", "CV",
"345", "KY",
"236", "CF",
"235", "TD",
"56", "CL",
"86", "CN",
"61", "CX",
"57", "CO",
"269", "KM",
"242", "CG",
"682", "CK",
"506", "CR",
"385", "HR",
"53" , "CU" ,
"537", "CY",
"420", "CZ",
"45" , "DK" ,
"253", "DJ",
"1" , "DM",
"1" , "DO",
"593", "EC",
"20" , "EG" ,
"503", "SV",
"240", "GQ",
"291", "ER",
"372", "EE",
"251", "ET",
"298", "FO",
"679", "FJ",
"358", "FI",
"33" , "FR",
"594", "GF",
"689", "PF",
"241", "GA",
"220", "GM",
"995", "GE",
"49" , "DE",
"233", "GH",
"350", "GI",
"30" , "GR",
"299", "GL",
"1" , "GD",
"590", "GP",
"1" , "GU",
"502", "GT",
"224", "GN",
"245", "GW",
"595", "GY",
"509", "HT",
"504", "HN",
"36" , "HU",
"354", "IS",
"91" , "IN",
"62" , "ID",
"964", "IQ",
"353", "IE",
"972", "IL",
"39" , "IT",
"1" , "JM",
"81", "JP", "962", "JO", "77", "KZ",
"254", "KE", "686", "KI", "965", "KW", "996", "KG",
"371", "LV", "961", "LB", "266", "LS", "231", "LR",
"423", "LI", "370", "LT", "352", "LU", "261", "MG",
"265", "MW", "60", "MY", "960", "MV", "223", "ML",
"356", "MT", "692", "MH", "596", "MQ", "222", "MR",
"230", "MU", "262", "YT", "52","MX", "377", "MC",
"976", "MN", "382", "ME", "1", "MS", "212", "MA",
"95", "MM", "264", "NA", "674", "NR", "977", "NP",
"31", "NL", "599", "AN", "687", "NC", "64", "NZ",
"505", "NI", "227", "NE", "234", "NG", "683", "NU",
"672", "NF", "1", "MP", "47", "NO", "968", "OM",
"92", "PK", "680", "PW", "507", "PA", "675", "PG",
"595", "PY", "51", "PE", "63", "PH", "48", "PL",
"351", "PT", "1", "PR", "974", "QA", "40", "RO",
"250", "RW", "685", "WS", "378", "SM", "966", "SA",
"221", "SN", "381", "RS", "248", "SC", "232", "SL",
"65", "SG", "421", "SK", "386", "SI", "677", "SB",
"27", "ZA", "500", "GS", "34", "ES", "94", "LK",
"249", "SD", "597", "SR", "268", "SZ", "46", "SE",
"41", "CH", "992", "TJ", "66", "TH", "228", "TG",
"690", "TK", "676", "TO", "1", "TT", "216", "TN",
"90", "TR", "993", "TM", "1", "TC", "688", "TV",
"256", "UG", "380", "UA", "971", "AE", "44", "GB",
"1", "US", "598", "UY", "998", "UZ", "678", "VU",
"681", "WF", "967", "YE", "260", "ZM", "263", "ZW",
"591", "BO", "673", "BN", "61", "CC", "243", "CD",
"225", "CI", "500", "FK", "44", "GG", "379", "VA",
"852", "HK", "98", "IR", "44", "IM", "44", "JE",
"850", "KP", "82", "KR", "856", "LA", "218", "LY",
"853", "MO", "389", "MK", "691", "FM", "373", "MD",
"258", "MZ", "970", "PS", "872", "PN", "262", "RE",
"7", "RU", "590", "BL", "290", "SH", "1", "KN",
"1", "LC", "590", "MF", "508", "PM", "1", "VC",
"239", "ST", "252", "SO", "47", "SJ",
"963","SY",
"886",
"TW", "255",
"TZ", "670",
"TL","58",
"VE","84",
"VN",
"284", "VG",
"340", "VI",
"678","VU",
"681","WF",
"685","WS",
"967","YE",
"262","YT",
"27","ZA",
"260","ZM",
"263","ZW"]
var keys = [String]()
var values = [String]()
// entries made up only of digits are phone codes, the rest are ISO country codes
let digits = CharacterSet.decimalDigits
for i in x {
    if i.rangeOfCharacter(from: digits) != nil {
        values.append(i)   // phone code
    } else {
        keys.append(i)     // country code
    }
}
// map ISO country code -> phone code
let countryCodeListDict = NSDictionary(objects: values as [String], forKeys: keys as [String] as [NSCopying])
if let code = (countryCodeListDict as? [String: AnyObject])?[country] as? String {
    return code
} else {
    return ""
}
}
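For example (hypothetical call site), the lookup then returns the dialing code for an ISO country code:
let phoneCode = getCountryPhonceCode("US")
print(phoneCode)   // prints "1"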
