Add Minoos support
This commit is contained in:
73
app/Console/Commands/ScrapeMinoosCommand.php
Normal file
73
app/Console/Commands/ScrapeMinoosCommand.php
Normal file
@@ -0,0 +1,73 @@
|
||||
<?php
|
||||
|
||||
namespace App\Console\Commands;
|
||||
|
||||
use Illuminate\Console\Command;
|
||||
use App\Services\MinoosService;
|
||||
use App\Topic;
|
||||
use App\Source;
|
||||
|
||||
class ScrapeMinoosCommand extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'scrape:minoos';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Scrape Minoos';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * Scrapes the Minoos site, upserts each article under the "minoos"
     * source, and links the article to its topics.
     *
     * @return int
     */
    public function handle()
    {
        $source = Source::where('slug', 'minoos')->first();

        // Bail out early if the "minoos" source row has not been seeded yet;
        // calling ->articles() on null would be a fatal error.
        if (is_null($source)) {
            $this->error('Source "minoos" not found.');

            return 1;
        }

        $articles = (new MinoosService)->scrape();

        foreach ($articles as $article) {

            // Upsert the article (keyed by guid) under this source and
            // return the current article model instance.
            $articleModel = $source->articles()->updateOrCreate(
                ["guid" => $article["guid"]],
                [
                    "title" => $article["title"],
                    "url" => $article["url"],
                    "author" => $article["author"],
                    "featured_image" => $article["image"],
                    "body" => $article["content"],
                    "published_date" => $article["date"],
                    "meta" => [
                        "title" => $article["title"],
                        "highlights" => $article["highlights"]
                    ]
                ]
            );

            // Ensure each topic exists, then attach it to the article without
            // detaching topics linked by earlier runs.
            collect($article["topics"])->each(function ($topic) use ($articleModel) {
                $topicModel = Topic::firstOrCreate(["slug" => $topic["slug"]], ["name" => $topic["name"]]);

                $topicModel->articles()->syncWithoutDetaching($articleModel);
            });

        }

        // Docblock promises an int exit code; the original returned nothing.
        return 0;
    }
}
|
@@ -105,6 +105,10 @@ class Kernel extends ConsoleKernel
|
||||
$schedule->command('scrape:adhadhu')->everyFiveMinutes()
|
||||
->runInBackground()
|
||||
->pingOnSuccess(config('app.url') . "/api/ping/adhadhu");
|
||||
|
||||
$schedule->command('scrape:minoos')->everyFiveMinutes()
|
||||
->runInBackground()
|
||||
->pingOnSuccess(config('app.url') . "/api/ping/minoos");
|
||||
}
|
||||
|
||||
|
||||
|
38
app/Services/Feeds/MinoosFeed.php
Normal file
38
app/Services/Feeds/MinoosFeed.php
Normal file
@@ -0,0 +1,38 @@
|
||||
<?php
|
||||
namespace App\Services\Feeds;
|
||||
|
||||
use GuzzleHttp\Client;
|
||||
|
||||
class MinoosFeed implements Feed
{
    /**
     * HTTP client used to fetch the feed endpoint.
     *
     * @var Client
     */
    protected $client;

    public function __construct()
    {
        $this->client = new Client();
    }

    /**
     * Get all the latest news.
     *
     * Fetches the Minoos "news" category listing and normalises each post
     * into the shape the scraper pipeline expects.
     *
     * @return array
     *
     * @throws \JsonException when the endpoint returns malformed JSON
     */
    public function get() : array
    {
        $response = $this->client->request('GET', "https://fili.minoos.mv/api/category/news/posts");

        // Fail loudly on malformed JSON instead of silently iterating null.
        $data = json_decode((string) $response->getBody(), true, 512, JSON_THROW_ON_ERROR);

        $feeds = [];
        foreach ($data['data'] ?? [] as $item) {
            $feeds[] = [
                "title" => $item['heading'],
                "link" => "https://minoos.mv/" . $item['id'],
                "date" => $item['published_at'],
                // NOTE(review): assumes every post carries highlights; guard
                // with null in case the API omits the key.
                "highlights" => $item['highlights'] ?? null,
            ];
        }

        return $feeds;
    }
}
|
31
app/Services/MinoosService.php
Normal file
31
app/Services/MinoosService.php
Normal file
@@ -0,0 +1,31 @@
|
||||
<?php
|
||||
|
||||
namespace App\Services;
|
||||
|
||||
use App\Services\Feeds\MinoosFeed;
|
||||
use App\Services\Feeds\OneOnlineFeed;
|
||||
use App\Services\Scrapers\MinoosScraper;
|
||||
use App\Services\Scrapers\OneOnlineScraper;
|
||||
|
||||
class MinoosService
{
    /**
     * Scrape the Minoos feed and return fully-extracted articles.
     *
     * Pulls the latest post listing from the feed, scrapes each linked
     * article page, and drops any entry the scraper could not extract.
     *
     * @return array
     */
    public function scrape(): array
    {
        // Latest posts as normalised by MinoosFeed (title/link/date/highlights).
        $posts = (new MinoosFeed)->get();

        $articles = [];
        foreach ($posts as $post) {
            $scraped = (new MinoosScraper)->extract($post["link"], $post["date"], $post["highlights"]);

            // The scraper may yield null for pages it cannot parse; skip those.
            if (!is_null($scraped)) {
                $articles[] = $scraped;
            }
        }

        return $articles;
    }
}
|
48
app/Services/Scrapers/MinoosScraper.php
Normal file
48
app/Services/Scrapers/MinoosScraper.php
Normal file
@@ -0,0 +1,48 @@
|
||||
<?php
|
||||
|
||||
namespace App\Services\Scrapers;
|
||||
|
||||
use Goutte\Client;
|
||||
use Symfony\Component\DomCrawler\Crawler;
|
||||
use Illuminate\Support\Carbon;
|
||||
|
||||
class MinoosScraper
{
    /**
     * Goutte client used to fetch and crawl article pages.
     *
     * @var Client
     */
    protected $client;

    public function __construct()
    {
        $this->client = new Client();
    }

    /**
     * Scrape a single Minoos article page.
     *
     * @param string $url URL of the article page.
     * @param string|null $date Published date string from the feed.
     * @param mixed $highlights Highlights payload from the feed, passed through.
     * @return array|null Normalised article data, or null when the page has no
     *                    headline / lead image (the caller skips null results).
     */
    public function extract($url, $date = null, $highlights = null)
    {
        $crawler = $this->client->request('GET', $url);

        // The service calling this method explicitly checks for a null
        // result, but the original code never produced one — DomCrawler
        // throws when filter() matches nothing. Guard the required nodes
        // and return null so unparseable pages are skipped, not fatal.
        $titleNodes = $crawler->filter('h1');
        $imageNodes = $crawler->filter('meta[property="og:image"]');
        if ($titleNodes->count() === 0 || $imageNodes->count() === 0) {
            return null;
        }

        $title = $titleNodes->first()->text();
        $image = $imageNodes->attr('content');

        // Each .doc-text node becomes one entry; content is kept as an array,
        // matching the original behaviour.
        $content = $crawler->filter('.doc-text')->each(function (Crawler $node) {
            return $node->text();
        });

        // Topic tags link to /tags/{slug}; derive slug from the href.
        $topics = $crawler->filter('a[href^="/tags/"]')->each(function (Crawler $node) {
            return [
                'name' => $node->text(),
                'slug' => str_replace("/tags/", "", $node->attr('href'))
            ];
        });

        return [
            'source' => 'minoos',
            'title' => $title,
            'guid' => basename($url),
            'content' => $content,
            'author' => 'unknown',
            'image' => $image,
            'highlights' => $highlights,
            'url' => $url,
            'date' => Carbon::parse($date)->format("Y-m-d H:i:s"),
            'topics' => $topics
        ];
    }
}
|
Reference in New Issue
Block a user