Miadhu News Support

app/Console/Commands/ScrapeMiadhuCommand.php (new file, 72 lines)
@@ -0,0 +1,72 @@
<?php

namespace App\Console\Commands;

use Illuminate\Console\Command;
use App\Services\MiadhuService;
use App\Topic;
use App\Source;

class ScrapeMiadhuCommand extends Command
{
    /**
     * The name and signature of the console command.
     *
     * @var string
     */
    protected $signature = 'scrape:miadhu';

    /**
     * The console command description.
     *
     * @var string
     */
    protected $description = 'Scrape Miadhu';

    /**
     * Create a new command instance.
     *
     * @return void
     */
    public function __construct()
    {
        parent::__construct();
    }

    /**
     * Execute the console command.
     *
     * @return int
     */
    public function handle()
    {
        $source = Source::where('slug', 'miadhu')->first();

        $articles = (new MiadhuService)->scrape();

        foreach ($articles as $article) {
            // Attach the relationship between source and article and return the current article instance
            $articleModel = $source->articles()->updateOrCreate(
                ["guid" => $article["guid"]],
                [
                    "title" => $article["title"],
                    "url" => $article["url"],
                    "author" => $article["author"],
                    "featured_image" => $article["image"],
                    "body" => $article["content"],
                    "published_date" => $article["date"],
                    "meta" => [
                        "title" => $article["og_title"]
                    ]
                ]
            );

            collect($article["topics"])->each(function ($topic) use ($articleModel) {
                $topicModel = Topic::firstOrCreate(["slug" => $topic["slug"]], ["name" => $topic["name"]]);

                $topicModel->articles()->syncWithoutDetaching($articleModel);
            });
        }
    }
}
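
Outside the five-minute schedule, the command can be exercised directly. A minimal sketch, assuming a standard Laravel setup where the command is picked up by the console kernel (the Artisan facade call is plain framework usage, nothing Miadhu-specific):

use Illuminate\Support\Facades\Artisan;

// Run the scraper once; Artisan::call returns the command's exit code.
$exitCode = Artisan::call('scrape:miadhu');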

@@ -60,6 +60,9 @@ class Kernel extends ConsoleKernel
        $schedule->command('scrape:feshun')->everyFiveMinutes()
            ->pingOnSuccess(env('APP_URL') . "/api/ping/feshun");

        $schedule->command('scrape:miadhu')->everyFiveMinutes()
            ->pingOnSuccess(env('APP_URL') . "/api/ping/miadhu");
    }

    /**

app/Services/Feeds/MiadhuFeed.php (new file, 44 lines)
@@ -0,0 +1,44 @@
<?php

namespace App\Services\Feeds;

use Goutte\Client;

class MiadhuFeed implements Feed
{
    protected $client;

    public function __construct()
    {
        $this->client = new Client();
    }

    /**
     * Return the latest articles from Miadhu
     *
     * @return array
     */
    public function get(): array
    {
        $crawler = $this->client->request('GET', "https://miadhu.mv");

        $feeds = [];
        $dates = [];

        // Scrape the dates for the articles
        $crawler->filter('.middle div[class*="col-md-3 col-6 news-block"] em')->each(function ($node) use (&$dates) {
            $dates[] = $node->text();
        });

        // Scrape the headline links and pair each one with its date by index
        $crawler->filter('.middle div[class*="col-md-3 col-6 news-block"] h2 a')->each(function ($node, $i) use (&$feeds, $dates) {
            $feeds[] = [
                "title" => $node->text(),
                "link" => $node->attr('href'),
                "date" => $dates[$i]
            ];
        });

        return $feeds;
    }
}
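
For reference, each entry that MiadhuFeed::get() pushes onto $feeds is a small associative array; only the keys below come from the code above, the values are hypothetical placeholders:

[
    "title" => "Some headline",                        // text of the h2 a node
    "link"  => "https://miadhu.mv/article/read/12345", // href of the same node (the id is made up)
    "date"  => "…",                                    // text of the matching em node, parsed later with Carbon
]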

app/Services/MiadhuService.php (new file, 27 lines)
@@ -0,0 +1,27 @@
<?php

namespace App\Services;

use App\Services\Feeds\MiadhuFeed;
use App\Services\Scrapers\MiadhuScraper;

class MiadhuService
{
    /**
     * Scrape all the articles from Miadhu
     *
     * @return array
     */
    public function scrape(): array
    {
        // Fetch the latest article links and dates from the Miadhu front page
        $articles = (new MiadhuFeed)->get();

        $articleItems = [];

        // Loop through the feed entries, scraping each article with a fresh scraper instance
        foreach ($articles as $article) {
            $articleItems[] = (new MiadhuScraper)->extract($article["link"], $article["date"]);
        }

        return $articleItems;
    }
}
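
Each item in the array returned by scrape() is the associative array built by MiadhuScraper::extract() below; a sketch of the shape, with values elided:

[
    'source'   => 'Miadhu News',
    'title'    => '…',
    'og_title' => '…',
    'image'    => '…',
    'content'  => ['…'],   // one entry per scraped paragraph
    'url'      => '…',
    'date'     => '…',     // Y-m-d H:i:s
    'guid'     => '…',
    'author'   => '…',
    'topics'   => [['name' => '…', 'slug' => 'no-specific-topic']],
]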

app/Services/Scrapers/MiadhuScraper.php (new file, 53 lines)
@@ -0,0 +1,53 @@
<?php

namespace App\Services\Scrapers;

use Goutte\Client;
use Illuminate\Support\Carbon;

class MiadhuScraper
{
    protected $client;

    protected $title;
    protected $content;
    protected $author = "unknown";

    public function __construct()
    {
        $this->client = new Client;
    }

    public function extract($url, $date)
    {
        $crawler = $this->client->request('GET', $url);

        $crawler->filter('.read-dv-text > p')->each(function ($node) {
            $this->content[] = $node->text();
        });

        if ($crawler->filter('.author-name')->count() == 1) {
            $this->author = $crawler->filter('.author-name')->first()->text();
        }

        // Remove all alphabetic characters from the string
        // preg_replace("/[a-zA-Z]/", "", $string);
        return [
            'source' => 'Miadhu News',
            'title' => $crawler->filter('h1')->first()->text(),
            'og_title' => $crawler->filter('meta[property*="og:title"]')->first()->attr('content'),
            'image' => $crawler->filter(".col-md-12 img")->first()->attr('src'),
            'content' => $this->content,
            'url' => $url,
            'date' => Carbon::parse($date)->format("Y-m-d H:i:s"),
            'guid' => str_replace("https://miadhu.mv/article/read/", "", $url),
            'author' => $this->author,
            'topics' => [
                [
                    "name" => "ވަކި މަޢުލޫއެއް ނޭންގެ", // "No specific topic" in Dhivehi
                    "slug" => "no-specific-topic"
                ]
            ]
        ];
    }
}
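
A hypothetical example of how extract() derives the guid from an article URL (the article id 12345 is made up for illustration):

$url  = "https://miadhu.mv/article/read/12345";
$guid = str_replace("https://miadhu.mv/article/read/", "", $url); // "12345"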