Hama news integration
This commit is contained in:
73
app/Console/Commands/ScrapeHamaCommand.php
Normal file
73
app/Console/Commands/ScrapeHamaCommand.php
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
<?php
|
||||||
|
|
||||||
|
namespace App\Console\Commands;
|
||||||
|
|
||||||
|
use App\Source;
|
||||||
|
use Illuminate\Console\Command;
|
||||||
|
use App\Services\HamaService;
|
||||||
|
use App\Topic;
|
||||||
|
use Illuminate\Support\Carbon;
|
||||||
|
|
||||||
|
class ScrapeHamaCommand extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'scrape:hama';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Scrape Hama';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Looks up the "hama" source, scrapes the latest articles via
     * HamaService, upserts each article keyed by its guid, and syncs the
     * article's topics.
     *
     * @return int
     */
    public function handle()
    {
        $source = Source::where('slug', 'hama')->first();

        // Guard against a missing source row: the original code would fatal
        // with a call to articles() on null if "hama" was never seeded.
        if ($source === null) {
            $this->error("Source with slug 'hama' was not found.");

            return 1;
        }

        $articles = (new HamaService)->scrape();

        foreach ($articles as $article) {
            // Attach the relationship between source and article and return
            // the current article instance (upsert keyed by guid).
            $articleModel = $source->articles()->updateOrCreate(
                ["guid" => $article["guid"]],
                [
                    "title" => $article["title"],
                    "url" => $article["url"],
                    "author" => $article["author"],
                    "featured_image" => $article["image"],
                    "body" => $article["content"],
                    "published_date" => $article["date"],
                    "meta" => [
                        "title" => $article["og_title"],
                    ],
                ]
            );

            // Create any missing topics, then attach them to the article
            // without detaching topics linked elsewhere.
            collect($article["topics"])->each(function ($topic) use ($articleModel) {
                $topicModel = Topic::firstOrCreate(
                    ["slug" => $topic["slug"]],
                    ["name" => $topic["name"]]
                );

                $topicModel->articles()->syncWithoutDetaching($articleModel);
            });
        }

        // The docblock promises an int exit code; the original returned
        // nothing (implicit null). Return 0 to report success.
        return 0;
    }
}
|
||||||
@@ -77,9 +77,13 @@ class Kernel extends ConsoleKernel
|
|||||||
->runInBackground()
|
->runInBackground()
|
||||||
->pingOnSuccess(env('APP_URL') . "/api/ping/miadhu");
|
->pingOnSuccess(env('APP_URL') . "/api/ping/miadhu");
|
||||||
|
|
||||||
$schedule->command('scrape:vnews')->everyFiveMinutes()
|
$schedule->command('scrape:vnews')->everyFiveMinutes()
|
||||||
->runInBackground()
|
->runInBackground()
|
||||||
->pingOnSuccess(env('APP_URL') . "/api/ping/vnews");
|
->pingOnSuccess(env('APP_URL') . "/api/ping/vnews");
|
||||||
|
|
||||||
|
$schedule->command('scrape:hama')->everyFiveMinutes()
|
||||||
|
->runInBackground()
|
||||||
|
->pingOnSuccess(env('APP_URL') . "/api/ping/hama");
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
39
app/Services/Feeds/HamaFeed.php
Normal file
39
app/Services/Feeds/HamaFeed.php
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
<?php
|
||||||
|
namespace App\Services\Feeds;
|
||||||
|
|
||||||
|
use Goutte\Client;
|
||||||
|
|
||||||
|
class HamaFeed implements Feed
{
    /**
     * HTTP client used to crawl the Hama homepage.
     *
     * @var \Goutte\Client
     */
    protected $client;

    public function __construct()
    {
        $this->client = new Client();
    }

    /**
     * Get all the latest news items from the Hama homepage.
     *
     * Each item is an array with "title", "link" and "date" keys.
     *
     * @return array
     */
    public function get() : array
    {
        $crawler = $this->client->request('GET', "https://hama.mv/");

        $feeds = [];

        $crawler->filter('div[id*="latest"] div[class*="col-md-3 col-6"] a')->each(function ($node) use (&$feeds) {
            // Skip cards missing the headline or timestamp node: the
            // original called text() on an unchecked first() match, which
            // throws on an empty selection and aborts the whole feed.
            if ($node->filter('h5')->count() === 0 || $node->filter('.datetime')->count() === 0) {
                return;
            }

            $feeds[] = [
                "title" => $node->filter('h5')->first()->text(),
                "link" => $node->attr('href'),
                "date" => $node->filter('.datetime')->first()->text(),
            ];
        });

        return $feeds;
    }
}
|
||||||
27
app/Services/HamaService.php
Normal file
27
app/Services/HamaService.php
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
<?php
|
||||||
|
|
||||||
|
namespace App\Services;
|
||||||
|
|
||||||
|
use App\Services\Feeds\HamaFeed;
|
||||||
|
use App\Services\Scrapers\HamaScraper;
|
||||||
|
|
||||||
|
class HamaService
{
    /**
     * Scrape the latest Hama articles.
     *
     * Pulls the list of article links from the Hama homepage feed, then
     * scrapes each article page for its full content.
     *
     * (Previous comments here were copy-pasted from a different "Sun" RSS
     * scraper and described behavior that does not exist in this class.)
     *
     * @return array
     */
    public function scrape(): array
    {
        $feedItems = (new HamaFeed)->get();

        $articles = [];

        // A fresh scraper instance is created per article so per-article
        // state (content, topics, author) does not leak between pages.
        foreach ($feedItems as $item) {
            $articles[] = (new HamaScraper)->extract($item["link"], $item["date"]);
        }

        return $articles;
    }
}
|
||||||
59
app/Services/Scrapers/HamaScraper.php
Normal file
59
app/Services/Scrapers/HamaScraper.php
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
<?php
|
||||||
|
|
||||||
|
namespace App\Services\Scrapers;
|
||||||
|
|
||||||
|
use Exception;
|
||||||
|
use Goutte\Client;
|
||||||
|
use Illuminate\Support\Carbon;
|
||||||
|
|
||||||
|
class HamaScraper
{
    /**
     * HTTP client used to fetch article pages.
     *
     * @var \Goutte\Client
     */
    protected $client;

    // Unused in this class; kept so any subclass relying on it still works.
    protected $title;

    // Paragraphs of the article body, in page order. Initialized to an
    // array: the original left this null, so an article with no `.body > p`
    // paragraphs produced a null body, and appending relied on implicit
    // null-to-array conversion.
    protected $content = [];

    // Article author; falls back to "unknown" when the page has none.
    protected $author = "unknown";

    // Topic entries, each an array with "name" and "slug" keys.
    protected $topics = [];

    public function __construct()
    {
        $this->client = new Client;
    }

    /**
     * Scrape a single article page.
     *
     * @param string $url  Absolute URL of the article page.
     * @param string $date Publish date string as shown in the feed.
     *
     * @return array Article data: source, title, og_title, image, content,
     *               url, date (Y-m-d H:i:s), guid, author, topics.
     */
    public function extract($url, $date)
    {
        // Reset per-article state so reusing one scraper instance does not
        // accumulate content/topics from a previous extract() call.
        $this->content = [];
        $this->topics = [];
        $this->author = "unknown";

        $crawler = $this->client->request('GET', $url);

        $crawler->filter('.body > p')->each(function ($node) {
            $this->content[] = $node->text();
        });

        if ($crawler->filter('.author_name')->count() > 0) {
            $this->author = $crawler->filter('.author_name')->first()->text();
        }

        $crawler->filter('.article-tags a')->each(function ($node) {
            $this->topics[] = [
                "name" => $node->text(),
                // Tag hrefs are absolute URLs; strip the site prefix to keep
                // only the slug portion.
                "slug" => str_replace("https://hama.mv/", "", $node->attr('href')),
            ];
        });

        return [
            'source' => 'Hama',
            'title' => $crawler->filter('h1')->first()->text(),
            'og_title' => $crawler->filter('meta[property*="og:title"]')->first()->attr('content'),
            'image' => $crawler->filter("figure > img")->first()->attr('data-src'),
            'content' => $this->content,
            'url' => $url,
            'date' => Carbon::parse($date)->format("Y-m-d H:i:s"),
            'guid' => basename($url),
            'author' => $this->author,
            'topics' => $this->topics,
        ];
    }
}
|
||||||
@@ -94,4 +94,5 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
</x-layout>
|
</x-layout>
|
||||||
Reference in New Issue
Block a user