Angular2 - Load binary image data asynchronously inside ngFor - image

I'm trying to load some images using node's fs module. I have it working on a single image but what is the correct way to do this inside an ngFor loop?
Currently I have:
<div *ngFor="let job of getJobs() | async">
  <img src={{readImageFile(job.thumbUrl)}}>
</div>
getJobs() returns an observable from my service.
readImageFile() calls a Meteor server-side method which loads the image data using fs and returns it asynchronously:
readImageFile(url) {
  if (url) {
    this.call('readImageFile', url, function(err, res) {
      if (err) {
        console.log(err);
      }
      else {
        console.log('read image file success');
        return "data:image/jpg;base64," + res;
      }
    });
  }
}
This doesn't work, so what is the correct method for asynchronously loading data inside an ngFor loop?
UPDATE
I tried with readImageFile(url) | async
readImageFile: function(url) {
  var Future = Npm.require('fibers/future');
  var myFuture = new Future();
  fs.readFile(String(url), function read(error, result) {
    if (error) {
      myFuture.throw(error);
    } else {
      myFuture.return(new Buffer(result).toString('base64'));
    }
  });
  return myFuture.wait();
}

This is not a good approach: readImageFile(job.thumbUrl) will be called on every change detection run, which happens quite often.
This should work
<img [src]="readImageFile(job.thumbUrl) | async">
The | async pipe should prevent change detection from calling readImageFile() repeatedly.
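For the async pipe to have something to subscribe to, readImageFile() must return a Promise or Observable instead of undefined. A minimal sketch, wrapping the callback-style this.call from the question in a Promise (the method name and call signature are taken from the question):

readImageFile(url) {
  if (!url) {
    return Promise.resolve(null);
  }
  // Wrap the callback-style server method in a Promise so the
  // async pipe can subscribe to its eventual value.
  return new Promise((resolve, reject) => {
    this.call('readImageFile', url, (err, res) => {
      if (err) {
        reject(err);
      } else {
        resolve('data:image/jpg;base64,' + res);
      }
    });
  });
}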
Another approach would be to fill an array and bind to this array instead:
getJobs() {
  this.jobs = /*code to get jobs*/.map(jobs => {
    this.images = new Array(jobs.length);
    jobs.forEach((item, idx) => {
      this.readImageFile(idx, item.thumbUrl);
    });
    return jobs;
  });
}
readImageFile(idx, url) {
  if (url) {
    this.call('readImageFile', url, (err, res) => {
      if (err) {
        console.log(err);
      }
      else {
        console.log('read image file success');
        this.images[idx] = "data:image/jpg;base64," + res;
      }
    });
  }
}
<div *ngFor="let job of jobs | async; let idx=index">
  <img [src]="images[idx]">
</div>


Using a URL query parameter to version cached responses

I am trying to cache specific URLs. Each URL has an md5 hash, and if a URL is updated with a new md5 I want to remove the current cached entry and add the new one.
cached url: http://www.mysite.lo/cards/index.php?md5=f51c2ef7795480ef2e0b1bd24c9e07
function shouldFetch(event) {
  if (event.request.url.indexOf('/cards/') == -1) {
    return false;
  }
  return true;
}

self.addEventListener('fetch', function(event) {
  if (shouldFetch(event)) {
    event.respondWith(
      caches.match(event.request).then(function(response) {
        if (response !== undefined) {
          return response;
        } else {
          return fetch(event.request).then(function(response) {
            let responseClone = response.clone();
            caches.open('v1').then(function(cache) {
              cache.put(event.request, responseClone);
            });
            return response;
          }).catch(function(err) {
            return caches.match(event.request);
          });
        }
      })
    );
  }
});
I know we can use caches.delete() and so on, but I want to call it only when the md5 in the new request has changed.
Thanks
You can accomplish roughly what you describe with the following, which makes use of the ignoreSearch option when calling cache.matchAll():
self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);
  if (url.searchParams.has('md5')) {
    event.respondWith((async () => {
      const cache = await caches.open(CACHE_NAME);
      const cachedResponses = await cache.matchAll(url.href, {
        // https://developers.google.com/web/updates/2016/09/cache-query-options
        ignoreSearch: true,
      });
      for (const cachedResponse of cachedResponses) {
        // If we already have the incoming URL cached, return it.
        if (cachedResponse.url === url.href) {
          return cachedResponse;
        }
        // Otherwise, delete the out-of-date response.
        await cache.delete(cachedResponse.url);
      }
      // If we've gotten this far, then there wasn't a cache match,
      // and our old entries have been cleaned up.
      const response = await fetch(event.request);
      await cache.put(event.request, response.clone());
      return response;
    })());
  }
  // Logic for non-md5 use cases goes here.
});
(You could make things slightly more efficient by rearranging some of the cache-manipulation code to bring it out of the critical response path, but that's the basic idea.)
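As a hedged sketch of that rearrangement (same assumed CACHE_NAME; this is not from the original answer): respond as soon as an exact match or network response is available, and push the cleanup and cache.put() into event.waitUntil() so they happen off the critical path:

self.addEventListener('fetch', (event) => {
  const CACHE_NAME = '...';
  const url = new URL(event.request.url);
  if (url.searchParams.has('md5')) {
    event.respondWith((async () => {
      const cache = await caches.open(CACHE_NAME);
      // An exact match (including the md5 parameter) can be served as-is.
      const exactMatch = await cache.match(url.href);
      if (exactMatch) {
        return exactMatch;
      }
      const response = await fetch(event.request);
      // Clone before the page starts consuming the response body.
      const responseClone = response.clone();
      // Defer cache cleanup and insertion until after responding.
      event.waitUntil((async () => {
        const staleResponses = await cache.matchAll(url.href, { ignoreSearch: true });
        for (const staleResponse of staleResponses) {
          await cache.delete(staleResponse.url);
        }
        await cache.put(event.request, responseClone);
      })());
      return response;
    })());
  }
});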

How to convert into promise and geocode the address with google maps

I want to convert this piece of code that I made into a promise, because I want to show the geocoded address in Vuetify.
This is my code so far; I'm including the Google Maps API and lodash.
<!DOCTYPE html>
<html>
<head>
  <title>Reverse Geocoding Sample</title>
</head>
<body>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.10/lodash.min.js"></script>
  <script>
    function geocodelatLng() {
      var response = [
        {
          "address": "213 Marlon Forks\nSouth Corineland, HI 81723-1044",
          "lat": "10.30431500",
          "lng": "123.89035500"
        },
        {
          "address": "1291 Stephania Road\nLake Dorotheastad, TN 82682-76",
          "lat": "10.30309100",
          "lng": "123.89154500"
        },
        {
          "address": "20330 Schmeler Course Apt. 210\nNorth Ari, NV 70048",
          "lat": "10.30356400",
          "lng": "123.89964100"
        }
      ];
      return _.map(response, coords => {
        // console.log(arr.index);
        var geocoder = new google.maps.Geocoder;
        var latLng = {
          lat: parseFloat(coords.lat),
          lng: parseFloat(coords.lng)
        };
        // for every lat,lng
        // console.log(latLng);
        geocoder.geocode({'location': latLng}, function(results, status) {
          if (status === 'OK') {
            if (results[0]) {
              console.log(results[0].formatted_address);
            } else {
              window.alert('No results found');
            }
          } else {
            window.alert('Geocoder failed due to: ' + status);
          }
        });
      });
    }
  </script>
  <script async defer
    src="https://maps.googleapis.com/maps/api/js?key=AIzaSyA6vKL6Q4u5ZhGAJlYOMkQZ13pxCUXOe9k&callback=geocodelatLng">
  </script>
</body>
</html>
Now this logs everything to the console, but my problem is that I don't know how to show it in the v-list-tile-title tag. I tried everything, used promises, but it won't work; maybe you can help me. Not familiar with ES6, though.
<v-list-tile>
  <v-list-tile-content>
    <v-list-tile-title>{{ geocodedCoordinates address here }}</v-list-tile-title>
    <v-list-tile-sub-title>{{ address.address }}</v-list-tile-sub-title>
  </v-list-tile-content>
</v-list-tile>
On the basis that you want geocodelatLng() to return a promise that delivers geocode results, you first need something that will convert geocoder.geocode() from a callback API to Promises. The general principles of doing that are covered extensively here.
Following that advice, you might end up with something like this:
function geocodelatLng() {
  var response = [ ....... ]; // as in the question
  var promises = response.map(function(coords) { // map response to an array of Promises
    return new Promise(function(resolve, reject) {
      var geocoder = new google.maps.Geocoder();
      var latLng = {
        'lat': parseFloat(coords.lat),
        'lng': parseFloat(coords.lng)
      };
      geocoder.geocode({'location': latLng}, function(results, status) {
        if (status === 'OK') {
          if (results.length) {
            resolve(results[0]); // or `resolve(results)` to deliver all results
          } else {
            reject(new Error('No results found'));
          }
        } else {
          reject(new Error('Geocoder failed due to: ' + status));
        }
      });
    });
  });
  return Promise.all(promises);
}
You are left with a possible issue that Promise.all() will return a rejected promise if any one of the promises rejects, which you probably don't want. It would be better to ignore rejections (errors) and deliver successfully derived results.
A solution to that issue is provided here in the form of a reflect function, which can be applied as follows:
function geocodelatLng() {
  var response = [ ....... ]; // as in the question
  function reflect(promise) {
    return promise.then(function(v) {
      return { 'status': 'resolved', 'value': v };
    }, function(e) {
      return { 'status': 'rejected', 'error': e };
    });
  }
  var promises = response.map(coords => { // map response to an array of Promises
    return new Promise(function(resolve, reject) {
      var geocoder = new google.maps.Geocoder();
      var latLng = {
        'lat': parseFloat(coords.lat),
        'lng': parseFloat(coords.lng)
      };
      geocoder.geocode({'location': latLng}, function(results, status) {
        if (status === 'OK') {
          if (results[0]) {
            resolve(results[0]);
          } else {
            reject(new Error('No results found'));
          }
        } else {
          reject(new Error('Geocoder failed due to: ' + status));
        }
      });
    });
  });
  return Promise.all(promises.map(reflect)) // map `promises` to an array of "reflected" promises before passing to Promise.all()
    .then(function(results) {
      return results.filter(function(res) { // filter the reflected results to exclude errors
        return res.status === 'resolved';
      }).map(function(res) { // map the remaining reflected results to the value of interest
        return res.value;
      });
    });
}
The caller of geocodelatLng now receives a Promise that will deliver (via .then()) all the successful geocode results, which can be passed to Vuetify or whatever.
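On the Vue side, a minimal sketch of consuming that Promise, where geocodedAddresses is a hypothetical data property (and assuming the Maps API has loaded before this runs):

new Vue({
  el: '#app',
  data: {
    geocodedAddresses: [] // hypothetical property holding the resolved addresses
  },
  mounted: function() {
    var self = this;
    geocodelatLng().then(function(results) {
      // Keep just the formatted address of each successful result.
      self.geocodedAddresses = results.map(function(r) {
        return r.formatted_address;
      });
    });
  }
});

The template can then bind to it, e.g. {{ geocodedAddresses[idx] }} inside the v-list-tile-title.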

wait for a job finished to render component in vuejs

I have a component here, and I need first to make a request using socket.io :
<template>
  <h1>Don't show me before the socket's response</h1>
</template>
<script>
export default {
  beforeCreate: function() {
    let sessid = this.$cookie.get('sessid')
    this.$options.sockets.logout = (data) => {
      if (data.redirect) {
        this.$router.push(data.redirect)
      } else {
        console.log('here, you can render the template')
      }
    }
    this.$socket.emit('logout', { sessid })
  }
}
</script>
This code works, but it shows the template in the browser for a brief moment before the redirection happens.
I would like to know if there's a trick to wait for the socket response before rendering the template.
You can use v-if: when the socket response arrives, set a variable that the v-if checks, so the HTML isn't shown before then, something like the following:
<template>
  <h1 v-if="sockResp">Don't show me before the socket's response</h1>
</template>
<script>
export default {
  data: function() {
    return {
      sockResp: false
    }
  },
  beforeCreate: function() {
    let sessid = this.$cookie.get('sessid')
    this.$options.sockets.logout = (data) => {
      if (data.redirect) {
        this.$router.push(data.redirect)
      } else {
        console.log('here, you can render the template')
        this.sockResp = true
      }
    }
    this.$socket.emit('logout', { sessid })
  }
}
</script>

FIXED: SailsJs + Google Blogger is timing out

So I've been tinkering with SailsJs and really like it so far, but I am trying to pull posts from a blog I own into a view. This is a bit of a problem because the connection times out when trying to get the index view, and there is no feedback in the console from the console.log entries.
Blogger Service
// BloggerService.js - in api/services
var g = require('googleapis');
var apiKey = 'OUche33eekym0nKEY-uME';

exports.getBlogPosts = function(options, cb) {
  g.discover('blogger', 'v3').execute(function(err, client) {
    if (err) {
      cb = null;
      return console.log(err);
    } else {
      var opts = { 'blogId': options.id, 'maxResults': options.limit, 'fields': 'items(title,content,url)' };
      cb = client.blogger.posts.list(opts);
    };
  });
};

exports.getBlogPost = function(options, cb) {
  g.discover('blogger', 'v3').execute(function(err, client) {
    if (err) {
      cb = null;
      return console.log(err);
    } else {
      var opts = { 'blogId': options.id, 'postId': options.postId };
      cb = client.blogger.posts.get(opts);
    };
  });
};
Calling the service in the controller. This is frustrating because the bottom of the documentation is very cavalier about where/how the service is called.
BlogController.js
/**
 * BlogController.js
 *
 * @description ::
 * @docs :: http://sailsjs.org/#!documentation/controllers
 */
module.exports = {
  index: function(req, res) {
    BloggerService.getBlogPosts({'id': '86753098675309', 'limit': 6}, function(err, blogPosts) {
      if (err) {
        return console.log(err);
      } else {
        console.log(blogPosts.items[0].url);
        res = blogPosts;
      };
    });
  }
}
Index view
<div>
  <% _.each(Model.items, function (blogPost) { %>
    <div class="panel panel-default">
      <div class="panel-heading"><%= blogPost.title %></div>
      <div class="panel-body"><%= blogPost.content %><input type="hidden" value="<%= blogPost.id %>"></div>
    </div>
  <% }) %>
</div>
Any help would be greatly appreciated. Thank you for the time you spent looking at this.
UPDATE
Many thanks to Scott, who got me closer to the end result. This is what I have thus far; I just need to clear up an authentication issue with discover/apiKeys.
exports.getBlogPosts = function(options, cb) {
  g.discover('blogger', 'v3').execute(function(err, client) {
    if (err) {
      cb(err);
    } else {
      var opts = { 'blogId': options.id, 'maxResults': options.limit, 'fetchBodies': false, 'fields': 'items(title,url)' };
      client.blogger.posts.list(opts).withApiKey(apiKey).execute(cb);
    };
  });
};

exports.getBlogPost = function(options, cb) {
  g.discover('blogger', 'v3').execute(function(err, client) {
    if (err) {
      cb(err);
    } else {
      var opts = { 'blogId': options.id, 'postId': options.postId };
      client.blogger.posts.get(opts).withApiKey(apiKey).execute(cb);
    };
  });
};
It looks like you might be new to Node.js, so you might want to read up on asynchronous programming with Node and the req and res objects used by Express. The two problems I see with your code right off the bat are:
1. You're assigning a value to your callback in BloggerService.js, rather than actually calling the callback: cb = client.blogger.posts.list(opts) should be (based on a quick scan of the Google API docs) client.blogger.posts.list(opts).execute(cb), and in case of an error cb = null should be cb(err).
2. You're assigning a value to the response object, instead of sending a response: res = blogPosts should be res.json(blogPosts).
As far as where / when you call your service, the docs aren't intending to be cavalier. The services are globalized so that they can be called from anywhere within any controller; it's up to you as the developer to decide where you need your service calls to be!
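Putting those two fixes together, a sketch of what the controller could look like (the view name and error handling here are illustrative assumptions, not from the original):

// BlogController.js - sketch with both fixes applied
module.exports = {
  index: function(req, res) {
    BloggerService.getBlogPosts({'id': '86753098675309', 'limit': 6}, function(err, blogPosts) {
      if (err) {
        // Send a real error response rather than only logging it.
        return res.serverError(err);
      }
      // Hand the posts to the view (or use res.json(blogPosts) for an API endpoint).
      return res.view('blog/index', { Model: blogPosts });
    });
  }
};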

node app not working while mongodb is inserting

I'm still learning MongoDB and Node.js.
I was trying to insert a bulk amount of data, say 100k rows with 80-100 columns. I created an app using Express.js for that. I coded it in such a way that the page redirects to another page once the upload button is clicked, while in the background the app inserts the data into the db.
The issue is that once the inserting starts, the app stops responding. Even after the insert completes, the app remains slower for about 2-3 minutes.
I tried this code on:
1. Fedora 14, Intel P4 2.80GHz, 1.5GiB RAM
2. Fedora 14, Intel i3 3.20GHz, 3GiB RAM
How can I avoid this problem? Will it work better on a better system?
Code to store the data
MongoClient.connect(MONGODB_URI, function (err, database) {
  var collection = database.collection(collectionName);
  for (index = 0; index < jsonResult.length; ++index) {
    var obj = jsonResult[index];
    for (var prop in obj) {
      if (headers[prop] == 'Date') {
        obj[prop] = new Date(obj[prop].replace(/(\d{2})-(\d{2})-(\d{4})/, "$2/$1/$3"));
      } else if (headers[prop] == 'Integer') {
        obj[prop] = parseInt(obj[prop]);
      } else if (headers[prop] == 'Float') {
        obj[prop] = parseFloat(obj[prop]);
      }
    }
    collection.insert(obj, function (err, result) {
      if (err) {
        console.log(JSON.stringify(err));
      }
    });
  }
});
Note:
- jsonResult is the result of reading a file (csv) and converting it into JSON
- headers is an object that holds the key-to-data-type mapping; its values must match the strings checked above ('Date', 'Integer', 'Float'):
var headers = {
  'iodate': 'Date',
  'sno': 'Integer',
  'salary': 'Float'
}
Code to read and convert csv to json
var cv_json = require('convert-json'); // required module convert-json (converts csv, xls, xlsx into json)
cv_json({
  input: target_path,
  output: null
}, function (err, result) {
  if (err) {
    console.error(err);
  } else {
    persists(req, res, result, collectionName, headers); // function where the insert takes place
  }
})
Right now, persists() returns immediately, even though the insert hasn't finished. This is due to the asynchronous nature of JavaScript/Node.js.
You need to serialize the insert operations so you don't clog the server. Use async.eachSeries().
Your persists function should use a callback to notify the caller that it has finished. Something like this:
var async = require('async'); // needed for eachSeries

function persists(req, res, jsonResult, collectionName, headers, callback) {
  MongoClient.connect(MONGODB_URI, function (err, database) {
    var collection = database.collection(collectionName);
    async.eachSeries(jsonResult,
      function (obj, cb) {
        for (var prop in obj) {
          if (headers[prop] == 'Date') {
            obj[prop] = new Date(obj[prop].replace(/(\d{2})-(\d{2})-(\d{4})/, "$2/$1/$3"));
          } else if (headers[prop] == 'Integer') {
            obj[prop] = parseInt(obj[prop]);
          } else if (headers[prop] == 'Float') {
            obj[prop] = parseFloat(obj[prop]);
          }
        }
        collection.insert(obj, function (err, result) {
          cb(err); // move on to the next document only after this insert finishes
        });
      },
      function (err) {
        callback(err);
      }
    );
  });
}
And then to use it:
persists(req, res, result, collectionName, headers, function (err) {
  console.log(err ? err : "insert finished");
});
