Get a list of all URLs (links) on a webpage using Selenium WebDriver

The TestNG test below collects every anchor (<a>) element with driver.findElements(By.tagName("a")), reads each element's href attribute, and prints the resulting URLs to the console.

package seleniumLearnWebDriver;
import org.testng.annotations.Test;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.testng.annotations.BeforeTest;

public class GetAllUrlLinks {
    WebDriver driver;
    private static String[] links = null;   // holds the href value of every link found on the page
    private static int linksCount = 0;      // number of <a> elements found

    @Test
    public void printAllUrls() {
        // Wait up to 9 seconds for elements to appear before a lookup fails.
        driver.manage().timeouts().implicitlyWait(9, TimeUnit.SECONDS);
        driver.get("http://www.seleniumlearn.com/");

        // Collect every anchor (<a>) element on the page.
        List<WebElement> allLinks = driver.findElements(By.tagName("a"));
        linksCount = allLinks.size();
        links = new String[linksCount];

        System.out.println("All links on the web page:");
        for (int i = 0; i < linksCount; i++) {
            // Store and print the href attribute of each link.
            links[i] = allLinks.get(i).getAttribute("href");
            System.out.println(links[i]);
        }
    }
    @BeforeTest
    public void beforeTest() {
        // Depending on the Selenium and Firefox versions, the geckodriver path
        // may need to be set first, for example:
        // System.setProperty("webdriver.gecko.driver", "D:\\lib\\geckodriver.exe");
        driver = new FirefoxDriver();

        // To run on Chrome instead, point to chromedriver and create a ChromeDriver:
        // System.setProperty("webdriver.chrome.driver", "D:\\lib\\chromedriver.exe");
        // driver = new ChromeDriver();
    }
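
    // Added sketch, not part of the original listing: some <a> elements have no
    // href attribute, so getAttribute("href") can return null for them. This
    // variant of the loop in printAllUrls() prints only non-empty values. The
    // method name printNonEmptyUrls is chosen here for illustration; it reuses
    // the same driver field and the page opened by the test.
    public void printNonEmptyUrls() {
        List<WebElement> allLinks = driver.findElements(By.tagName("a"));
        for (WebElement link : allLinks) {
            String href = link.getAttribute("href");
            if (href != null && !href.isEmpty()) {
                System.out.println(href);
            }
        }
    }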

}