Skip to content

Commit

Permalink
no message
Browse files Browse the repository at this point in the history
  • Loading branch information
OwenMelbz committed Oct 12, 2019
1 parent 26f17c1 commit 435fa84
Show file tree
Hide file tree
Showing 18 changed files with 410,963 additions and 48 deletions.
58 changes: 58 additions & 0 deletions app/Checkers/Page.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
<?php

namespace App\Checkers;

use Exception;
use App\DnsScan;
use App\Website;
use App\CrawlObserver;
use Spatie\Crawler\Crawler;
use GuzzleHttp\RequestOptions;
use Whoisdoma\DNSParser\DNSParser;
use SebastianBergmann\Diff\Differ;
use App\Notifications\DnsHasChanged;

class Page
{
    /**
     * The website whose pages are being crawled.
     *
     * @var Website
     */
    private $website;

    /**
     * Create a new page checker for the given website.
     *
     * @param Website $website
     */
    public function __construct(Website $website)
    {
        $this->website = $website;
    }

    /**
     * Run the full page check: crawl the site, then compare and notify.
     *
     * @return void
     */
    public function run()
    {
        $this->fetch();
        $this->compare();
        $this->notify();
    }

    /**
     * Crawl the website; results are persisted by CrawlObserver.
     *
     * @return void
     */
    private function fetch()
    {
        Crawler::create([
            RequestOptions::COOKIES => true,
            RequestOptions::CONNECT_TIMEOUT => 10,
            RequestOptions::TIMEOUT => 10,
            RequestOptions::ALLOW_REDIRECTS => false,
            RequestOptions::HEADERS => [
                // Use the app-wide user agent, consistent with the Uptime
                // checker — an empty User-Agent is rejected by some servers.
                'User-Agent' => config('app.user_agent'),
            ],
        ])
        ->ignoreRobots()
        ->setConcurrency(2)
        ->executeJavaScript()
        ->setCrawlObserver(new CrawlObserver($this->website))
        ->startCrawling($this->website->url);
    }

    /**
     * Compare the latest crawl against the previous one.
     *
     * TODO(review): not yet implemented.
     *
     * @return void
     */
    private function compare()
    {
    }

    /**
     * Notify the website owner about detected changes.
     *
     * TODO(review): not yet implemented.
     *
     * @return void
     */
    private function notify()
    {
    }
}
12 changes: 6 additions & 6 deletions app/Checkers/Uptime.php
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
use App\UptimeScan;
use GuzzleHttp\Client;
use Illuminate\Support\Str;
use SebastianBergmann\Diff\Differ;
use GuzzleHttp\RequestOptions;
use App\Notifications\WebsiteIsDown;
use App\Notifications\WebsiteIsBackUp;

Expand All @@ -32,13 +32,13 @@ private function fetch()
$response_time = 3001;

$response = $client->request('GET', $this->website->url, [
'on_stats' => function ($stats) use (&$response_time) {
RequestOptions::ON_STATS => function ($stats) use (&$response_time) {
$response_time = $stats->getTransferTime();
},
'verify' => false,
'allow_redirects' => true,
'headers' => [
'User-Agent' => 'Mozilla/5.0+(compatible; UptimeRobot/2.0; http://www.uptimerobot.com/; Odin)'
RequestOptions::VERIFY => false,
RequestOptions::ALLOW_REDIRECTS => true,
RequestOptions::HEADERS => [
'User-Agent' => config('app.user_agent'),
],
]);

Expand Down
48 changes: 48 additions & 0 deletions app/Console/Commands/CrawlSiteCommand.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
<?php

namespace App\Console\Commands;

use App\Website;
use App\Jobs\PageCheck;
use Illuminate\Console\Command;

class CrawlSiteCommand extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'check:pages {website}';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Crawl a website and record its pages';

    /**
     * Execute the console command.
     *
     * Runs the page check synchronously (dispatchNow) for the website
     * identified by the {website} argument; fails loudly if it is missing.
     *
     * @return mixed
     */
    public function handle()
    {
        $websiteId = $this->argument('website');

        PageCheck::dispatchNow(
            Website::findOrFail($websiteId)
        );
    }
}
49 changes: 49 additions & 0 deletions app/CrawlObserver.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
<?php

namespace App;

use Psr\Http\Message\UriInterface;
use Psr\Http\Message\ResponseInterface;
use GuzzleHttp\Exception\RequestException;
use Spatie\Crawler\CrawlObserver as SpatieCrawlObserver;

class CrawlObserver extends SpatieCrawlObserver
{
    /**
     * The website being crawled.
     *
     * @var Website
     */
    private $website;

    /**
     * Create an observer bound to the given website.
     *
     * @param Website $website
     */
    public function __construct(Website $website)
    {
        $this->website = $website;
    }

    /**
     * Called when the crawler has crawled the given url successfully.
     *
     * Records the url as a CrawledPage for the website, creating it on
     * first sight. firstOrCreate() already persists the record, so no
     * explicit save() is needed.
     *
     * @param UriInterface $url
     * @param ResponseInterface $response
     * @param UriInterface|null $foundOnUrl
     */
    public function crawled(UriInterface $url, ResponseInterface $response, ?UriInterface $foundOnUrl = null)
    {
        // Cast the PSR-7 Uri to a plain string so the attribute is stored
        // (and matched on subsequent crawls) as text, not an object.
        $this->website->crawledPages()->firstOrCreate([
            'url' => (string) $url,
        ]);
    }

    /**
     * Called when the crawler had a problem crawling the given url.
     *
     * NOTE(review): failures are currently ignored; consider recording
     * them so broken pages can be surfaced to the site owner.
     *
     * @param UriInterface $url
     * @param RequestException $requestException
     * @param UriInterface|null $foundOnUrl
     */
    public function crawlFailed(UriInterface $url, RequestException $requestException, ?UriInterface $foundOnUrl = null)
    {
    }
}
10 changes: 10 additions & 0 deletions app/CrawledPage.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
<?php

namespace App;

use Illuminate\Database\Eloquent\Model;

class CrawledPage extends Model
{
    // Empty guard list: all attributes are mass-assignable. Safe here
    // because records are only created internally by CrawlObserver,
    // never from user-supplied request input.
    protected $guarded = [];
}
11 changes: 11 additions & 0 deletions app/HasCrawledPages.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
<?php

namespace App;

trait HasCrawledPages
{
    /**
     * Pages discovered by the crawler for this website.
     *
     * @return \Illuminate\Database\Eloquent\Relations\HasMany
     */
    public function crawledPages()
    {
        return $this->hasMany(CrawledPage::class);
    }
}
3 changes: 0 additions & 3 deletions app/HasRobots.php
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
<?php


namespace App;


trait HasRobots
{

public function robots()
{
return $this->hasMany(RobotScan::class);
Expand Down
4 changes: 2 additions & 2 deletions app/Http/Controllers/WebsiteController.php
Original file line number Diff line number Diff line change
Expand Up @@ -180,9 +180,9 @@ public function update(Request $request, Website $website)
public function destroy(Website $website)
{
$this->panel->setEntry($website);

$this->panel->destroy('Website removed.');

Artisan::call('horizon:terminate');

return $this->panel->redirect('index');
Expand Down
43 changes: 43 additions & 0 deletions app/Jobs/PageCheck.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
<?php

namespace App\Jobs;

use App\Website;
use App\Checkers\Page;
use App\Checkers\Certificate;
use Illuminate\Bus\Queueable;
use Illuminate\Queue\SerializesModels;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;

class PageCheck implements ShouldQueue
{
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    /**
     * The website whose pages should be checked.
     *
     * @var Website
     */
    private $website;

    /**
     * Create a new job instance for the given website.
     *
     * @param Website $website
     */
    public function __construct(Website $website)
    {
        $this->website = $website;
    }

    /**
     * Execute the job by running the page checker against the website.
     *
     * @return void
     */
    public function handle()
    {
        (new Page($this->website))->run();
    }
}
10 changes: 6 additions & 4 deletions app/Website.php
Original file line number Diff line number Diff line change
Expand Up @@ -8,18 +8,19 @@
use App\Jobs\UptimeCheck;
use App\Jobs\OpenGraphCheck;
use App\Jobs\CertificateCheck;
use Illuminate\Database\Eloquent\Builder;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Builder;
use Illuminate\Database\Eloquent\Relations\BelongsTo;

class Website extends Model
{
use HasDns;
use HasCrons;
use HasUptime;
use HasRobots;
use HasCertificates;
use HasDns;
use HasOpenGraph;
use HasCrons;
use HasCertificates;
use HasCrawledPages;

protected $fillable = [
'url',
Expand All @@ -30,6 +31,7 @@ class Website extends Model
'robots_enabled',
'dns_enabled',
'cron_enabled',
'crawler_enabled',
'cron_key',
];

Expand Down
1 change: 1 addition & 0 deletions composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
"owenmelbz/domain-enforcement": "^0.0.6",
"predis/predis": "^1.1",
"sebastian/diff": "^3.0",
"spatie/crawler": "^4.6",
"spatie/ssl-certificate": "^1.15",
"visualappeal/php-ssllabs-api": "^1.0",
"whoisdoma/dnsparser": "dev-master",
Expand Down
Loading

0 comments on commit 435fa84

Please sign in to comment.