Why is gulp-rsync not deploying? - deployment

I'm trying to deploy to a staging site with gulp-rsync. I'm not receiving any errors, but nothing is deployed to my server. I would also expect to be asked for the password, which is not happening.
var gulp = require('gulp'),
    gutil = require('gulp-util'),
    sass = require('gulp-sass'),
    autoprefixer = require('gulp-autoprefixer'),
    minifycss = require('gulp-minify-css'),
    jshint = require('gulp-jshint'),
    stylish = require('jshint-stylish'),
    uglify = require('gulp-uglify'),
    concat = require('gulp-concat'),
    rename = require('gulp-rename'),
    plumber = require('gulp-plumber'),
    bower = require('gulp-bower'),
    sftp = require('gulp-sftp'),
    rsync = require('gulp-rsync');

gulp.task('deploy', function() {
  gulp.src('build/test_for_rsync')
    .pipe(rsync({
      root: 'build',
      hostname: '*****.wpengine.com',
      username: '*****',
      port: 2222,
      destination: '/wp-content/themes/',
      incremental: true,
      progress: true,
      relative: true,
      exclude: ['/node_modules', '/bower_components'],
      recursive: true
    }));
});

Try using the return keyword before gulp.src, so that gulp treats the task as asynchronous and waits for the stream (and the rsync transfer) to finish:
gulp.task('deploy', function() {
  return gulp.src('build/test_for_rsync')
    .pipe(rsync({
      root: 'build', ...
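For reference, the full task from the question with the stream returned would look like the sketch below (same option values as above; the hostname and username are still the masked placeholders):

// Sketch only: the deploy task from the question, now returning the stream
gulp.task('deploy', function() {
  return gulp.src('build/test_for_rsync')
    .pipe(rsync({
      root: 'build',
      hostname: '*****.wpengine.com',
      username: '*****',
      port: 2222,
      destination: '/wp-content/themes/',
      incremental: true,
      progress: true,
      relative: true,
      exclude: ['/node_modules', '/bower_components'],
      recursive: true
    }));
});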

Related

Running Sequelize migrations with Umzug through GitHub CI/CD

I'm using Sequelize with Umzug. The migrations work locally, but when I create a CI job for them, it cannot find the necessary modules.
I have a migrator.js file:
const { migrator } = require('./iumzug.js');
migrator.runAsCLI()
And an iumzug.ts file as well, which is configured like this:
const { Sequelize } = require('sequelize');
const { envVar } = require('./src/utilities/env-var');
const { Umzug, SequelizeStorage } = require("umzug");

const sequelize = new Sequelize({
  database: envVar.DB_DATABASE,
  host: envVar.DB_HOST,
  port: 5432,
  schema: ["TEST"].includes(envVar.NODE_ENV) ? 'test' : 'public',
  username: envVar.DB_USERNAME,
  password: envVar.DB_PASSWORD,
  dialect: 'postgres',
  ssl: true,
  dialectOptions: {
    ssl: {
      require: true,
    },
  },
});

const migrator = new Umzug({
  migrations: {
    glob: ["./src/database/*.ts", { cwd: __dirname }],
    resolve: ({ name, path, context }) => {
      // eslint-disable-next-line @typescript-eslint/no-var-requires
      const migration = require(path);
      return {
        // adjust the parameters Umzug will
        // pass to migration methods when called
        name,
        up: async () => migration.up(context, Sequelize),
        down: async () => migration.down(context, Sequelize)
      };
    }
  },
  context: sequelize.getQueryInterface(),
  storage: new SequelizeStorage({
    sequelize,
    modelName: "migration_meta"
  }),
  logger: console
});

module.exports = { migrator };
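For context, with this resolve function each file matched by ./src/database/*.ts is expected to export up and down functions that receive the query interface (the context) and the Sequelize class. A minimal migration in that shape might look like this (the file, table, and column names are made up for illustration, not taken from the question):

// Hypothetical migration, e.g. ./src/database/001-create-example.ts
module.exports = {
  up: async (queryInterface, Sequelize) => {
    // create an example table using the query interface passed in as context
    await queryInterface.createTable('example', {
      id: { type: Sequelize.INTEGER, primaryKey: true, autoIncrement: true },
      name: { type: Sequelize.STRING }
    });
  },
  down: async (queryInterface) => {
    // undo the change
    await queryInterface.dropTable('example');
  }
};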
I created a migration job in my GitHub Actions workflow file as follows:
migrations:
  runs-on: ubuntu-latest
  needs: build
  steps:
    - uses: actions/checkout@v3
    - name: migrations step
      run: |
        node migrator.js up
When I run the GitHub Action, I get this error.
I'm looking for alternatives / directions to fix it.
Let me know if I need to add any more code / pictures of the process.
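One thing worth noting about the job as shown: each GitHub Actions job runs on a fresh runner, so nothing installed in the build job carries over, and this job never installs the project's node modules itself, which is a common cause of "cannot find module" errors. A sketch of a job that installs dependencies before running the migrator might look like this (the Node version and the use of npm ci are assumptions, not taken from the question):

migrations:
  runs-on: ubuntu-latest
  needs: build
  steps:
    - uses: actions/checkout@v3
    - uses: actions/setup-node@v3
      with:
        node-version: 18
    - name: install dependencies
      run: npm ci
    - name: migrations step
      run: node migrator.js up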

Getting Protractor Tests to Run on SauceLabs

I am trying to launch some tests with Protractor going to SauceLabs.
I have my Sauce Connect tunnel up and running, and I believe my protractor.config.js is set up correctly, but when I run the tests on my machine with ng e2e --suite smoke, everything just runs locally and nothing goes through the tunnel. Any suggestions? I have been following this "tutorial" and it has been going pretty well, but I am just not seeing anything going through the tunnel.
Here is my protractor.config.js file:
const baseUrl = '<BASEURL>';
const maxNumberOfInstances = process.env.NUMBER_OF_INSTANCES ? process.env.NUMBER_OF_INSTANCES : 1;
const reportPath = 'protractor/report';
const HtmlScreenshotReporter = require('protractor-jasmine2-screenshot-reporter');
const screenShotReporter = new HtmlScreenshotReporter({
  dest: reportPath,
  filename: 'artemis-e2e-report.html'
});
const SAUCELABS_USERNAME = '<SAUCEUSERNAME>';
const SAUCELABS_AUTHKEY = '<SAUCEKEY>';
const chromeArgs = process.env.IS_LOCAL ? ['--no-sandbox', '--test-type=browser', '--lang=en', '--window-size=1680,1050'] : ['--disable-gpu', '--no-sandbox', '--test-type=browser', '--lang=en', '--window-size=1680,1050'];

const browserCapabilities = [{
  sauceUser: SAUCELABS_USERNAME,
  sauceKey: SAUCELABS_AUTHKEY,
  browserName: 'chrome',
  tunnelIdentifier: '<SAUCETUNNEL>',
  shardTestFiles: true,
  maxInstances: maxNumberOfInstances,
  platform: 'Windows 10',
  version: '73.0',
  screenResolution: '1280x1024',
  chromeOptions: {
    args: chromeArgs,
    prefs: {
      'credentials_enable_service': false,
      'profile': {
        'password_manager_enabled': false
      },
      download: {
        prompt_for_download: false,
        directory_upgrade: true,
        default_directory: 'C:\\downloads\\'
      },
    },
  },
  loggingPrefs: {
    browser: 'SEVERE'
  },
}, ];
// Protractor config
exports.config = {
  baseUrl: baseUrl,
  directConnect: true,
  allScriptsTimeout: 2 * 60 * 1000,
  jasmineNodeOpts: {
    defaultTimeoutInterval: 3 * 60 * 1000
  },
  getPageTimeout: 2 * 60 * 1000,
  suites: {
    smoke: 'protractor/smokeTests/*.scenario.ts',
  },
  multiCapabilities: browserCapabilities,
  framework: 'jasmine2',
  onPrepare: function () {
    browser.waitForAngularEnabled(true);
    require('ts-node').register({
      project: 'protractor/tsconfig.json',
    });
    const jasmineReporters = require('jasmine-reporters');
    const jUnitXMLReporter = new jasmineReporters.JUnitXmlReporter({
      consolidateAll: false,
      savePath: reportPath,
      filePrefix: 'xmloutput'
    });
    const JasmineConsoleReporter = require('jasmine-console-reporter');
    const consoleReporter = new JasmineConsoleReporter({
      colors: 1,
      cleanStack: 1,
      verbosity: 4,
      listStyle: 'indent',
      activity: true,
      emoji: true,
      beep: true,
      timeThreshold: {
        ok: 10000,
        warn: 15000,
        ouch: 30000,
      }
    });
    jasmine.getEnv().addReporter(jUnitXMLReporter);
    jasmine.getEnv().addReporter(screenShotReporter);
    jasmine.getEnv().addReporter(consoleReporter);
    browser.get(browser.baseUrl);
  },
  beforeLaunch: function () {
    return new Promise(function (resolve) {
      screenShotReporter.beforeLaunch(resolve);
    });
  },
  afterLaunch: function (exitCode) {
    return new Promise(function (resolve) {
      screenShotReporter.afterLaunch(resolve.bind(this, exitCode));
    });
  },
};
First of all, you are mentioning this:
"it is just running on my local machine and not going through the tunnel. Any suggestions"
This is not related to the tunnel, but to the following:
You still have directConnect: true; remove it from your config.
You added the Sauce Labs credentials to your capabilities, but you should set them in your config file at the root level. Here's an example (it's written for TypeScript, but it should give you an idea of how to set up your config file). The tunnel identifier is correct; you only need to be sure that you are getting the correct tunnel id, as @fijiaaron mentioned.
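For illustration, a minimal sketch of that root-level setup, reusing the constants from the config above (only the relevant parts are shown; the rest of the config stays as it is):

// Sketch only: Sauce credentials at the root of exports.config, directConnect removed
exports.config = {
  baseUrl: baseUrl,
  sauceUser: SAUCELABS_USERNAME,   // instead of inside the capability
  sauceKey: SAUCELABS_AUTHKEY,
  // directConnect: true,          // removed so Protractor talks to Sauce Labs
  multiCapabilities: [{
    browserName: 'chrome',
    tunnelIdentifier: '<SAUCETUNNEL>',
    platform: 'Windows 10',
    version: '73.0'
  }],
  // ...suites, framework, onPrepare, etc. unchanged
};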
Hope this helps
Where are you getting your tunnelIdentifier from?
You want to make sure:
The tunnel is running.
You can access the tunnel from where you are testing.
If you have a named tunnel (e.g. sc -i myTunnel), then "myTunnel" should be the tunnelIdentifier, not the tunnel id that is shown in the console output (i.e. not Tunnel ID: cdceac0e33db4d5fa44093e191dfdfb0); see the sketch after this list.
If you have an unnamed tunnel, then you should not need to specify a tunnelIdentifier for it to be used.
If you appear to be using the tunnel but cannot access your local environment, try a manual test session in Sauce Labs and select the tunnel to see if it works there.
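To illustrate the named-tunnel case: the identifier passed to sc and the one in the capability simply have to match. A sketch (the tunnel name "myTunnel" is made up):

// Start Sauce Connect with a named tunnel, for example:
//   sc -u <SAUCEUSERNAME> -k <SAUCEKEY> -i myTunnel
// ...then reference the same name in the capability:
const browserCapabilities = [{
  browserName: 'chrome',
  tunnelIdentifier: 'myTunnel'   // the name given to sc -i, not the console's Tunnel ID
}];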

sails lifecycle - custom service and config.log

I defined an l.js under /api/services:
exports.i = sails.log.info;
exports.v = sails.log.verbose;
exports.err = sails.log.error;
exports.w = sails.log.warn;
exports.d = sails.log.debug;
It works fine, but then I want to define a custom logger in /config/log.js:
let customLogger = new (winston.Logger)({
  transports: [
    new (winston.transports.Console, winston.transports.File)({
      name: 'file.info',
      level: 'info',
      filename: logPath + '/server-info.log',
      json: true
    }),
    new (winston.transports.Console, winston.transports.File)({
      name: 'file.warning',
      level: 'warning',
      filename: logPath + '/server-warning.log',
      json: true
    }),
    new (winston.transports.File)({
      name: 'file.error',
      level: 'error',
      filename: logPath + '/server-error.log',
      json: true
    })
  ]
});

module.exports.log = {
  level: 'info',
  custom: customLogger,
};
When I use sails.log.error("123"); l.err("456") in a controller, only "123" shows up in the server-error.log file; "456" is only printed to the console.
When I check the Sails docs about the lifecycle, it seems /config/log.js is loaded before /api/services. How should I modify my code?
Thanks
Although config/log.js is loaded before api/services/l.js, (my feeling is that) the assignment of the custom logger defined in log.js to sails.log.<level> happens after the service is loaded.
Hence the behavior.
Re-assigning on the lifted event fixes this. Use this code in l.js:
exports.err = sails.log.error;

sails.on('lifted', function() {
  console.log('lifted event');
  // Re-assign here
  exports.err = sails.log.error;
});
Do similarly for other methods of the service.
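Spelled out for all of the methods from the question, the service could look roughly like this (a sketch of the same re-assignment pattern, not tied to a particular Sails version):

// api/services/l.js - sketch: re-bind every shortcut once Sails has lifted
exports.i   = sails.log.info;
exports.v   = sails.log.verbose;
exports.err = sails.log.error;
exports.w   = sails.log.warn;
exports.d   = sails.log.debug;

sails.on('lifted', function() {
  // By now the custom winston logger from config/log.js is in place,
  // so pick up the new functions.
  exports.i   = sails.log.info;
  exports.v   = sails.log.verbose;
  exports.err = sails.log.error;
  exports.w   = sails.log.warn;
  exports.d   = sails.log.debug;
});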

Setting up PostCSS

I am just trying to set up PostCSS and integrate it into my recent workflow, but somehow I can't get it to work.
I am using a Mac and gulp as a task runner.
This is the error I get when starting my watch task:
TypeError: $.postcss is not a function
And this is what my gulpfile.js looks like:
var gulp = require('gulp');
var $ = require('gulp-load-plugins')({
  rename: {
    'browser-sync': 'browserSync',
    'gulp-postcss': 'postcss',
  },
  pattern: ['gulp-*', 'gulp.*', 'del', 'run-sequence', 'browser-sync'],
  DEBUG: false
});

var targetPath = 'dist';
var useBrowserSync = $.util.env.browserSync || true;

gulp.task('sass', function(){
  return gulp.src('src/scss/*.{scss,sass,css}')
    .pipe($.postcss([
      require('precss')()
    ], {syntax: require('postcss-scss')}))
    .pipe(gulp.dest(targetPath + '/public/assets/css'))
    .pipe( $.if( useBrowserSync, $.browserSync.stream() ) );
});

gulp.task('watch', ['sass'], function(){
  if( useBrowserSync ){
    $.browserSync.create();
    $.browserSync.init({
      proxy: 'localhost/my_project/dist/public/'
    });
  }
  gulp.watch('src/scss/*.{scss,sass,css}', ['sass']);
});
What is going wrong?

Ember-CLI: How to exclude a folder within /public from build

I have an Ember-CLI application with a few thousand static assets (~1GB), and my build time is now about 30 seconds.
I have tried this in my Brocfile.js, without success:
var app = new EmberApp({
  fingerprint: {
    enabled: false,
    exclude: ['large_folder']
  }
});
Build time with assets: TreeMerger | 29738ms / without: TreeMerger | 9182ms.
Any ideas how to speed up the build?
(Ember-CLI 0.1.7)
You have enabled: false; you can set it to true.
Also, on exclude, it would be better to give the path of the folder. For example, if you have a large folder inside images, then you can do it like this:
fingerprint: {
  exclude: ['assets/images/large_folder/', 'assets/uploads/other_large_folder/']
}
My own solution is currently to use the postBuild hook and a symbolic link to the assets folder.
lib/link-after-build/index.js:
var fs = require('fs');
var path = require('path');

module.exports = {
  name: 'link-after-build',

  // link additional assets after build
  postBuild: function(result) {
    if (process.env.EMBER_ENV === 'development') {
      var buildDirPath = result.directory;
      var srcpath = path.resolve("/opt/local/apache2/htdocs/large_folder");
      var dstpath = path.resolve(buildDirPath + "/large_folder");
      fs.symlinkSync(srcpath, dstpath);
    }
  }
};
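For this hook to run, the addon under lib/ has to be picked up as an in-repo addon, which is typically done by listing its path under the ember-addon section of the app's package.json (a sketch; exact support depends on the Ember CLI version, and the package name below is a placeholder):

{
  "name": "my-app",
  "ember-addon": {
    "paths": ["lib/link-after-build"]
  }
}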