Facebook photo upload date timestamp - facebook

I've downloaded all my Facebook data and wish to upload some of the images I've sent via Messenger to Google Photos. I want them to have the correct metadata so they are filed under the correct day, not under today. Unfortunately, their Date created is the date of download.
I tried parsing the title, but it doesn't seem to be a timestamp.
My question is: is there a way to create a script that adds the correct metadata to a photo downloaded from Facebook (via Download your information archive)? An example title is: 142666616_209126620919024_535058535265435125_n.jpg. This photo should have the date Jan 27, 2021, 10:53 AM.

After some digging I found a solution.
The archive that Facebook gives you has folders for each friend with the following structure:
\friend_name_a1b2c3
    \photos
        12345678_123456788996_123124421.jpg
    \gifs
    \audio
    messages_1.json
messages_1.json contains all your messages with that friend; here is an example of what a message looks like:
{
  "sender_name": "Your Name",
  "timestamp_ms": 1562647443588,
  "photos": [
    {
      "uri": "messages/inbox/friend_name_a1b2c3/photos/12345678_123456788996_123124421.jpg",
      "creation_timestamp": 1562647443
    }
  ],
  "type": "Generic",
  "is_unsent": false
},
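As a quick sanity check (assuming a Node.js shell), timestamp_ms is a plain Unix timestamp in milliseconds (creation_timestamp is the same moment in seconds), so it converts directly to the send date:
// timestamp_ms from the sample message above
new Date(1562647443588).toISOString() // '2019-07-09T04:44:03.588Z' - when the message was sent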
So, using glob and utimes I came up with the following script:
var glob = require("glob")
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require("fs"));
var { utimes } = require("utimes");
const readJSONFiles = async () => {
const messagesFiles = glob.sync(`**/message_*.json`)
const promises = [];
messagesFiles.forEach(mFile => {
promises.push(fs.readFileAsync(mFile, 'utf8'));
})
return Promise.all(promises);
}
readJSONFiles().then(result => {
const map = {};
result.forEach(data => {
const messagesContents = JSON.parse(data);
messagesContents.messages
.filter(m => m.photos)
.forEach(m => {
m.photos.filter(p => {
const splitted = p.uri.split("/")
const messagePhotoFileName = splitted[splitted.length - 1];
map[messagePhotoFileName] = m.timestamp_ms;
})
})
})
fs.writeFileSync("./map.json", JSON.stringify(map))
}).then(() => {
fs.readFileAsync("./map.json", 'utf8').then(data => {
const map = JSON.parse(data);
glob("**/*.jpg", function (er, files) {
files.forEach(file => {
const [, , photo] = file.split("/");
utimes(file, {
btime: map[photo],
atime: map[photo],
mtime: map[photo]
});
})
})
})
});
It creates a map of file-name: date-taken, then loops over all .jpg files and changes their metadata. It is definitely a little rough around the edges, but it gets the job done.
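One thing worth spot-checking afterwards: the utimes package works with millisecond timestamps, which is exactly what timestamp_ms already is, so the dates should come out right. A hypothetical check on a single file:
// run after the script above has finished
const fs = require("fs");
console.log(fs.statSync("friend_name_a1b2c3/photos/12345678_123456788996_123124421.jpg").mtime);
// should print the original message date rather than the download date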

Mochawesome with Cypress - how to get aggregated charts at higher level?

I've just started using mochawesome with Cypress (9.7). Our test structure is basically a number of spec files, each following something like the following format:
describe('(A): description of this spec', () => {
  describe('(B): description of test abc', () => {
    before(() => {
      // do specific set up bits for this test
    })
    it('(C): runs test abc', () => {
      // do actual test stuff
    })
  })
})
Within each spec file there is a single 'A' describe block, but there can be many 'B' level blocks (each with a single 'C'). It's done this way because the before block for each 'C' is always different, so I couldn't use a beforeEach.
When I run my various spec files, each structured similarly to the above, the mochawesome output is mostly correct - I get a collapsible block for each spec file at level 'A', each with multiple collapsible blocks at level B, each with test info as expected at level C.
But... The circular charts are only displayed at level B. What I was hoping was that it might be possible to have aggregated charts at level A, and a further aggregated chart for all the level A blocks.
Not sure I've explained this brilliantly(!), but hopefully someone understands, and can offer a suggestion?!
In cypress-mochawesome-reporter there's an alternative setup using on('after:run') which can perform the aggregation.
In Cypress v9.7.0
// cypress/plugins/index.js
const { beforeRunHook, afterRunHook } = require('cypress-mochawesome-reporter/lib');
const { aggregateResults } = require('./aggregate-mochawesome-report-chart');

module.exports = (on, config) => {
  on('before:run', async (details) => {
    await beforeRunHook(details);
  });
  on('after:run', async () => {
    aggregateResults(config)
    await afterRunHook();
  });
};
In Cypress v10+
// cypress.config.js
const { defineConfig } = require('cypress');
const { beforeRunHook, afterRunHook } = require('cypress-mochawesome-reporter/lib');
const { aggregateResults } = require('./aggregate-mochawesome-report-chart');

module.exports = defineConfig({
  reporter: 'cypress-mochawesome-reporter',
  video: false,
  retries: 1,
  reporterOptions: {
    reportDir: 'test-report',
    charts: true,
    reportPageTitle: 'custom-title',
    embeddedScreenshots: true,
    inlineAssets: false,
    saveAllAttempts: false,
    saveJson: true
  },
  e2e: {
    setupNodeEvents(on, config) {
      on('before:run', async (details) => {
        await beforeRunHook(details);
      });
      on('after:run', async () => {
        aggregateResults(config)
        await afterRunHook();
      });
    },
  },
});
The module to do the aggregation is
// aggregate-mochawesome-report-chart.js
const path = require('path');
const fs = require('fs-extra')

function aggregateResults(config) {
  const jsonPath = path.join(config.reporterOptions.reportDir, '.jsons', 'mochawesome.json');
  const report = fs.readJsonSync(jsonPath)
  const topSuite = report.results[0].suites[0]
  aggregate(topSuite)
  fs.writeJsonSync(jsonPath, report)
}

function aggregate(suite, level = 0) {
  const childSuites = suite.suites.map(child => aggregate(child, level + 1))
  suite.passes = suite.passes.concat(childSuites.map(child => child.passes)).flat()
  suite.failures = suite.failures.concat(childSuites.map(child => child.failures)).flat()
  suite.pending = suite.pending.concat(childSuites.map(child => child.pending)).flat()
  suite.skipped = suite.skipped.concat(childSuites.map(child => child.skipped)).flat()
  if (!suite.tests.length && suite.suites[0].tests.length) {
    // add a placeholder test so the chart is rendered when this describe has no tests of its own
    suite.tests = [
      {
        "title": "Aggregate of tests",
        "duration": 20,
        "pass": true,
        "context": null,
        "err": {},
        "uuid": "0",
        "parentUUID": suite.uuid,
      },
    ]
  }
  return suite
}

module.exports = {
  aggregateResults
}
The function aggregate() recursively loops down through child suites and adds the test results to the parent.
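To illustrate with hypothetical minimal data, a parent suite with no tests of its own picks up its child's results and gets a placeholder test so the chart renders:
// suite 'A' has no tests, its child 'B' has one passing test
const suiteA = {
  uuid: 'A', tests: [], passes: [], failures: [], pending: [], skipped: [],
  suites: [{ uuid: 'B', tests: [{ title: 't1' }], passes: ['t1-uuid'], failures: [], pending: [], skipped: [], suites: [] }]
};
aggregate(suiteA);
// suiteA.passes is now ['t1-uuid'] and suiteA.tests contains the
// "Aggregate of tests" placeholder, so mochawesome draws a chart at level A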
JSON files
Note the json file is different at the point where afterRunHook runs and at the end of the test run.
If you have the option saveJson: true set, you will get a final json file in the report directory called index.json.
At the afterRunHook stage the file is mochawesome.json.
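If you want to double-check the aggregated numbers at the end of the run (assuming saveJson: true and the reportDir used above), you can read the final JSON back, for example:
// hypothetical quick check after the run completes
const fs = require('fs-extra');
const report = fs.readJsonSync('test-report/index.json');
console.log(report.stats); // overall suite/test/pass/failure counts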
Before aggregation
After aggregation

Image returned from REST API always displays broken

I am building a content management system for an art portfolio app, with React. The client will POST to the API which uses Mongoose to insert into a MongoDB. The API then queries the DB for the newly inserted image, and returns it to the client.
Here's my code to connect to MongoDB using Mongoose:
mongoose.connect('mongodb://localhost/test')
  .then(() => console.log('connected to db'))
  .catch(err => console.log(err))

mongoose.Promise = global.Promise
const db = mongoose.connection
db.on('error', console.error.bind(console, 'MongoDB connection error:'))

const Schema = mongoose.Schema;
const ImgSchema = new Schema({
  img: { data: Buffer, contentType: String }
})
const Img = mongoose.model('Img', ImgSchema)
I am using multer and fs to handle the image file. My POST endpoint looks like this:
router.post('/', upload.single('image'), (req, res) => {
  if (!req.file) {
    res.send('no file')
  } else {
    const imgItem = new Img()
    imgItem.img.data = fs.readFileSync(req.file.path)
    imgItem.contentType = 'image/png'
    imgItem
      .save()
      .then(data =>
        Img.findById(data, (err, findImg) => {
          console.log(findImg.img)
          fs.writeFileSync('api/uploads/image.png', findImg.img.data)
          res.sendFile(__dirname + '/uploads/image.png')
        }))
  }
})
I can see in the file structure that writeFileSync is writing the image to the disk. res.sendFile grabs it and sends it down to the client.
Client side code looks like this:
handleSubmit = e => {
  e.preventDefault()
  const img = new FormData()
  img.append('image', this.state.file, this.state.file.name)
  axios
    .post('http://localhost:8000/api/gallery', img, {
      onUploadProgress: progressEvent => {
        console.log(progressEvent.loaded / progressEvent.total)
      }
    })
    .then(res => {
      console.log('responsed')
      console.log(res)
      const returnedFile = new File([res.data], 'image.png', { type: 'image/png' })
      const reader = new FileReader()
      reader.onloadend = () => {
        this.setState({ returnedFile, returned: reader.result })
      }
      reader.readAsDataURL(returnedFile)
    })
    .catch(err => console.log(err))
}
This does successfully place both the returned file and the img data url on state. However, in my application, the image always displays broken.
Here's some screenshots:
How to fix this?
Avoid sending back base64 encoded images (multiple images + large files + large encoded strings = very slow performance). I'd highly recommend creating a microservice that only handles image uploads and any other image related get/post/put/delete requests. Separate it from your main application.
For example:
I use multer to create an image buffer
Then use sharp or fs to save the image (depending upon file type)
Then I send the filepath to my controller to be saved to my DB
Then, the front-end does a GET request when it tries to access: http://localhost:4000/uploads/timestamp-randomstring-originalname.fileext
In simple terms, my microservice acts like a CDN solely for images.
For example, a user sends a post request to http://localhost:4000/api/avatar/create with some FormData:
It first passes through some Express middlewares:
libs/middlewares.js
...
app.use(cors({ credentials: true, origin: "http://localhost:3000" })) // allows receiving of cookies from front-end

app.use(morgan(`tiny`)); // logging framework

app.use(multer({
  limits: {
    fileSize: 10240000,
    files: 1,
    fields: 1
  },
  fileFilter: (req, file, next) => {
    if (!/\.(jpe?g|png|gif|bmp)$/i.test(file.originalname)) {
      req.err = `That file extension is not accepted!`
      return next(null, false)
    }
    next(null, true);
  }
}).single(`file`))

app.use(bodyParser.json()); // parses header requests (req.body)

app.use(bodyParser.urlencoded({ limit: `10mb`, extended: true })); // allows objects and arrays to be URL-encoded

...etc
Then, it hits the avatars route:
routes/avatars.js
app.post(`/api/avatar/create`, requireAuth, saveImage, create);
It then passes through some user authentication, then goes through my saveImage middleware:
services/saveImage.js
// Express middleware: saves the uploaded image buffer to disk, then passes the file path along via req.file.path
const createRandomString = require('../shared/helpers');
const fs = require("fs");
const sharp = require("sharp");

module.exports = (req, res, next) => {
  const randomString = createRandomString();

  if (req.err || !req.file) {
    return res.status(500).json({ err: req.err || `Unable to locate the requested file to be saved` });
  }

  const filename = `${Date.now()}-${randomString}-${req.file.originalname}`;
  const filepath = `uploads/${filename}`;
  const setFilePath = () => { req.file.path = filepath; return next(); };

  (/\.(gif|bmp)$/i.test(req.file.originalname))
    ? fs.writeFile(filepath, req.file.buffer, (err) => {
        if (err) {
          return res.status(500).json({ err: `There was a problem saving the image.` });
        }
        setFilePath();
      })
    : sharp(req.file.buffer).resize(256, 256).max().withoutEnlargement().toFile(filepath).then(() => setFilePath());
};
If the file is saved, it then sends a req.file.path to my create controller. This gets saved to my DB as a file path and as an image path (the avatarFilePath or /uploads/imagefile.ext is saved for removal purposes and the avatarURL or [http://localhost:4000]/uploads/imagefile.ext is saved and used for the front-end GET request):
controllers/avatars.js (I'm using Postgres, but you can substitute for Mongo)
create: async (req, res, done) => {
  try {
    const avatarurl = `${apiURL}/${req.file.path}`;
    await db.result(
      "INSERT INTO avatars(userid, avatarURL, avatarFilePath) VALUES ($1, $2, $3)",
      [req.session.id, avatarurl, req.file.path]
    );
    res.status(201).json({ avatarurl });
  } catch (err) {
    return res.status(500).json({ err: err.toString() });
  }
},
Then when the front-end tries to access the uploads folder via <img src={avatarURL} alt="image" /> or <img src="[http://localhost:4000]/uploads/imagefile.ext" alt="image" />, it gets served up by the microservice:
libs/server.js
const express = require("express");
const app = express();
const path = app.get("path");
const PORT = 4000;

//============================================================//
// EXPRESS SERVE AVATAR IMAGES
//============================================================//
app.use(`/uploads`, express.static(`uploads`));

//============================================================//
/* CREATE EXPRESS SERVER */
//============================================================//
app.listen(PORT);
What it looks like when logging requests:
19:17:54 INSERT INTO avatars(userid, avatarURL, avatarFilePath) VALUES ('08861626-b6d0-11e8-9047-672b670fe126', 'http://localhost:4000/uploads/1536891474536-k9c7OdimjEWYXbjTIs9J4S3lh2ldrzV8-android.png', 'uploads/1536891474536-k9c7OdimjEWYXbjTIs9J4S3lh2ldrzV8-android.png')
POST /api/avatar/create 201 109 - 61.614 ms
GET /uploads/1536891474536-k9c7OdimjEWYXbjTIs9J4S3lh2ldrzV8-android.png 200 3027 - 3.877 ms
What the user sees upon successful GET request:
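For completeness, a minimal client-side sketch (hypothetical, assuming axios and the { avatarurl } JSON response returned by the create controller above):
const uploadAvatar = async file => {
  const form = new FormData();
  form.append('file', file, file.name); // field name must match .single(`file`) in the multer setup
  const res = await axios.post('http://localhost:4000/api/avatar/create', form, { withCredentials: true });
  // res.data.avatarurl points at the statically served image,
  // e.g. http://localhost:4000/uploads/1536891474536-...-android.png, usable directly in <img src={...} />
  return res.data.avatarurl;
};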

Cordova Ionic Native File plugin - Where does it store the files in iOS?

I've been trying to save a file on iOS. Right now I've been running the app through Xcode straight onto my phone.
When I run the app it says the file is saved successfully, but I don't see any file when I use a file manager app like FileApp, FileManager, iCloud, or Apple's crappy Files app.
From a web search I gather that in order to save the file, iOS creates a sandboxed folder for the app.
If I've been saving it to this.file.documentsDirectory, how can a user open it in, say, Pages or Numbers? (Apple's Word and Excel replacements, for the uninitiated.)
Here’s the code.
writeToIOS(opts: {
  fileName: string,
  text: any
}): Promise<IResponse<any>> {
  let response: IResponse<boolean> = {
    success: false,
    error: '',
    data: false
  };
  const options: IWriteOptions = {
    replace: true
  };
  return new Promise((resolve, reject) => {
    const path = this.file.documentsDirectory;
    const directory = 'Attendance Log';
    this.file
      .checkDir(path, directory)
      .then(res => {
        this.file
          .writeFile(path + directory, opts.fileName, opts.text, options)
          .then(res => {
            response = { ...response, success: true, error: res, data: res };
            resolve(response);
          })
          .catch(error => reject(error));
      })
      .catch(() => {
        this.file
          .createDir(path, directory, true)
          .then(directory => {
            this.file
              .writeFile(path + directory.name, opts.fileName, opts.text, options)
              .then(res => {
                response = { ...response, success: true, error: res, data: res };
                resolve(response);
              })
              .catch(error => reject(error));
          })
          .catch(error => reject(error));
      });
  });
}
EDIT:
What the client/user should be able to do is choose the file and open it in his/her favorite app, be that a file manager, a writer, or a spreadsheet app.
I don't have much experience with iOS, but I think you have chosen a good directory.
cordova.file.documentsDirectory - Files private to the app, but that
are meaningful to other application (e.g. Office files). Note that for
OSX this is the user's ~/Documents directory. (iOS, OSX)
On iOS I would look in /var/mobile/Applications/<UUID>/Documents
This is using the cordova-plugin-fileopener2 in conjunction with the Ionic Native file opener:
const options = {
  replace: true
};
const path = this.file.documentsDirectory;
const directory = 'Attendance Log';

this.file
  .checkDir(path, directory)
  .then(res => {
    this.file
      .writeFile(path + directory, opts.fileName, opts.text, options)
      .then(res => {
        this.fileOpener
          .open(`${path}${directory}/${opts.fileName}`, 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
          .then(() => ...)
          .catch(error => ...);
      })
      .catch(error => ...);
  })
  .catch(error => ...);

facebook messenger bot encoding error

I have written a sample echo-message bot using the Facebook Messenger API and wit.ai actions.
My message from the Facebook page is received and the proper action function defined using the wit APIs is also getting called. However,
while returning the response, I am getting the following error:
Oops! An error occurred while forwarding the response to : Error: (#100) Param message[text] must be a UTF-8 encoded string
at fetch.then.then.json (/app/index.js:106:13)
at process._tickCallback (internal/process/next_tick.js:103:7)
Here is the function which is used to return the response -
const fbMessage = (id, text) => {
  const body = JSON.stringify({
    recipient: { id },
    message: { text },
  });
  const qs = 'access_token=' + encodeURIComponent(FB_PAGE_ACCESS_TOKEN);
  return fetch('https://graph.facebook.com/v2.6/me/messages?' + qs, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json; charset=UTF-8' },
    body
  })
    .then(rsp => rsp.json())
    .then(json => {
      if (json.error && json.error.message) {
        throw new Error(json.error.message);
      }
      return json;
    });
};
I copied this function from the messenger.js file in the documentation since I am just trying a POC.
I checked the values for text and id in this function using console.log statements and they look correct.
Can some experts help me to solve this error?
Note - I tried encoding the text using text.toString("utf8"); but it returns the encoded string as [object Object], and that's the
response I get from the bot, so it doesn't work.
Get the latest code from node-wit; there was a change in Facebook id usage.
According to Facebook:
On Tue May 17 format of user and page ids delivered via webhooks will
change from an int to a string to better support default json encoder
in js (that trims long ints). Please make sure your app works with
string ids returned from webhooks as well as with ints.
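In practice that means the recipient id should be handled as a string end-to-end, and the text passed to fbMessage must already be a plain string - the (#100) error above is what the Graph API returns when message.text is not one. A small defensive tweak (my assumption, not from the linked fix) to the body built in fbMessage:
const body = JSON.stringify({
  recipient: { id: String(id) }, // ids now arrive as strings; coercing is a harmless safeguard
  message: { text },             // text must be an actual string, not an object
});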
If you are still getting the issue with the API, try adding an if (event.message && !event.message.is_echo) condition as shown in the code below.
// Message handler
app.post('/webhook', (req, res) => {
  const data = req.body;
  if (data.object === 'page') {
    data.entry.forEach(entry => {
      entry.messaging.forEach(event => {
        if (event.message && !event.message.is_echo) {
          const sender = event.sender.id;
          const sessionId = findOrCreateSession(sender);
          const { text, attachments } = event.message;
          if (attachments) {
            fbMessage(sender, 'Sorry I can only process text messages for now.')
              .catch(console.error);
          } else if (text) {
            wit.runActions(
              sessionId,                    // the user's current session
              text,                         // the user's message
              sessions[sessionId].context   // the user's current session state
            ).then((context) => {
              console.log('Waiting for next user messages');
              sessions[sessionId].context = context;
            })
            .catch((err) => {
              console.error('Oops! Got an error from Wit: ', err.stack || err);
            })
          }
        } else {
          console.log('received event', JSON.stringify(event));
        }
      });
    });
  }
  res.sendStatus(200);
});
Reference:
no matching user bug
no matching user fix

Finding total contributions of a user from GitHub API

I am trying to extract the below info for any user from GitHub.
Is there a way/API exposed in the GitHub REST API where we can get this information directly?
Answer for 2019: use GitHub API v4 (GraphQL).
First go to GitHub to create a personal access token: https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line. In step 7 (scopes), select only read:user.
cURL
curl -H "Authorization: bearer token" -X POST -d '{"query":"query {\n user(login: \"MeiK2333\") {\n name\n contributionsCollection {\n contributionCalendar {\n colors\n totalContributions\n weeks {\n contributionDays {\n color\n contributionCount\n date\n weekday\n }\n firstDay\n }\n }\n }\n }\n}"}' https://api.github.com/graphql
JavaScript
async function getContributions(token, username) {
  const headers = {
    'Authorization': `bearer ${token}`,
  }
  const body = {
    "query": `query {
      user(login: "${username}") {
        name
        contributionsCollection {
          contributionCalendar {
            colors
            totalContributions
            weeks {
              contributionDays {
                color
                contributionCount
                date
                weekday
              }
              firstDay
            }
          }
        }
      }
    }`
  }
  const response = await fetch('https://api.github.com/graphql', { method: 'POST', body: JSON.stringify(body), headers: headers })
  const data = await response.json()
  return data
}

const data = await getContributions('token', 'MeiK2333')
console.log(data)
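The total itself sits under the standard GraphQL data envelope of the response, so with the helper above you can read it off directly, e.g.:
// continuing the example above
const total = data.data.user.contributionsCollection.contributionCalendar.totalContributions;
console.log(`Total contributions: ${total}`);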
Yes, you can do this easily with the new GraphQL API.
Check out the explorer: https://developer.github.com/v4/explorer/
There you can see the contributions collection which is an edge of the user. You can get all of the information necessary to rebuild the calendar.
I've included a full example, and the explorer documentation can guide you even further.
Specifically to answer your question, query.user.contributionsCollection.contributionCalendar.totalContributions is what you are looking for.
Go ahead and copy/paste the following into the explorer and you will see my contribution history for the last year
query {
  user(login: "qhenkart") {
    email
    createdAt
    contributionsCollection(from: "2019-09-28T23:05:23Z", to: "2020-09-28T23:05:23Z") {
      contributionCalendar {
        totalContributions
        weeks {
          contributionDays {
            weekday
            date
            contributionCount
            color
          }
        }
        months {
          name
          year
          firstDay
          totalWeeks
        }
      }
    }
  }
}
To load the SVG with all contributions you can use this code in your HTML page:
<img src="https://ghchart.rshah.org/username" alt="Name Your Github chart">
To customize the color you can just do this:
<img src="https://ghchart.rshah.org/HEXCOLORCODE/username" alt="Name Your Github chart">
HEXCOLORCODE = 17A2B8
You can use the github events api for that:
Example (node.js)
const got = require('got')

async function getEvents(username) {
  const events = []
  let page = 1
  do {
    const url = `https://api.github.com/users/${username}/events?page=${page}`
    var { body } = await got(url, {
      json: true
    })
    page++
    events.push(...body)
  } while (body.length) // keep paging until GitHub returns an empty page
  return events
}

(async () => {
  const events = await getEvents('handtrix')
  console.log('Overall Events', events.length)
  console.log('PullRequests', events.filter(event => event.type === 'PullRequestEvent').length)
  console.log('Forks', events.filter(event => event.type === 'ForkEvent').length)
  console.log('Issues', events.filter(event => event.type === 'IssuesEvent').length)
  console.log('Reviews', events.filter(event => event.type === 'PullRequestReviewEvent').length)
})()
Example (javascript)
async function getEvents(username) {
  const events = []
  let page = 1
  do {
    const url = `https://api.github.com/users/${username}/events?page=${page}`
    var body = await fetch(url).then(res => res.json())
    page++
    events.push(...body)
  } while (body.length) // keep paging until GitHub returns an empty page
  return events
}

(async () => {
  const events = await getEvents('handtrix')
  console.log('Overall Events', events.length)
  console.log('PullRequests', events.filter(event => event.type === 'PullRequestEvent').length)
  console.log('Forks', events.filter(event => event.type === 'ForkEvent').length)
  console.log('Issues', events.filter(event => event.type === 'IssuesEvent').length)
  console.log('Reviews', events.filter(event => event.type === 'PullRequestReviewEvent').length)
})()
Documentation
https://developer.github.com/v3/activity/events/
https://developer.github.com/v3/activity/events/types/
https://www.npmjs.com/package/got
You can get the SVG calendar from https://github.com/users/<USER>/contributions with the to URL parameter, like:
https://github.com/users/bertrandmartel/contributions?to=2016-12-31
You can use a basic xml parser to sum all the contributions from the svg.
An example with curl & xmlstarlet for year 2016:
curl -s "https://github.com/users/bertrandmartel/contributions?to=2016-12-31" | \
xmlstarlet sel -t -v "sum(//svg/g/g/rect/#data-count)"
I believe you can see the count of contributions in a timeframe, as well as other individual contributor analytics, within Code Climate's Velocity git analytics, which you may request access to here: https://go.codeclimate.com/velocity-free-for-teams
You could use this function to extract the contributions from the last year (client):
function getContributions() {
  const svgGraph = document.getElementsByClassName('js-calendar-graph')[0];
  const daysRects = svgGraph.getElementsByClassName('day');
  const days = [];
  for (let d of daysRects) {
    days.push({
      date: d.getAttribute('data-date'),
      count: d.getAttribute('data-count')
    });
  }
  return days;
}
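To turn that into a single total, you can sum the per-day counts it returns:
// run in the browser console on a profile page, together with getContributions() above
const total = getContributions().reduce((sum, day) => sum + Number(day.count), 0);
console.log(total);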
I've also written a small node module which can 'extract' the contributions:
simonwep/github-contributions
Maybe this will help you (even if I'm 4 years too late).
If you would prefer an npm package, you can try this:
https://github.com/SammyRobensParadise/github-contributions-counter#readme
You can get all-time contributions or contributions broken down by year.